diff --git a/.cspell-partial.json b/.cspell-partial.json index e231eddd712ea..13849ef298019 100644 --- a/.cspell-partial.json +++ b/.cspell-partial.json @@ -5,5 +5,5 @@ "perception/bytetrack/lib/**" ], "ignoreRegExpList": [], - "words": ["dltype", "tvmgen", "quantizer", "imageio", "mimsave"] + "words": ["dltype", "tvmgen"] } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e35b948e6153e..d3d9149cd2c8d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,10 +6,10 @@ common/autoware_auto_common/** opensource@apex.ai satoshi.ota@tier4.jp shumpei.w common/autoware_auto_geometry/** satoshi.ota@tier4.jp common/autoware_auto_perception_rviz_plugin/** opensource@apex.ai satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp taiki.tanaka@tier4.jp takeshi.miura@tier4.jp yoshi.ri@tier4.jp common/autoware_auto_tf2/** jit.ray.c@gmail.com satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomoya.kimura@tier4.jp +common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/** team-spatzenhirn@uni-ulm.de +common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/** khalil@leodrive.ai common/autoware_point_types/** taichi.higashide@tier4.jp common/autoware_testing/** adam.dabrowski@robotec.ai satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomoya.kimura@tier4.jp -common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/** khalil@leodrive.ai -common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/** team-spatzenhirn@uni-ulm.de common/bag_time_manager_rviz_plugin/** taiki.tanaka@tier4.jp common/component_interface_specs/** isamu.takagi@tier4.jp yukihiro.saito@tier4.jp common/component_interface_tools/** isamu.takagi@tier4.jp @@ -42,7 +42,6 @@ common/tier4_camera_view_rviz_plugin/** makoto.ybauta@tier4.jp uken.ryu@tier4.jp common/tier4_control_rviz_plugin/** taiki.tanaka@tier4.jp common/tier4_datetime_rviz_plugin/** isamu.takagi@tier4.jp common/tier4_debug_rviz_plugin/** takayuki.murooka@tier4.jp -common/tier4_debug_tools/** ryohsuke.mitsudome@tier4.jp 
common/tier4_localization_rviz_plugin/** isamu.takagi@tier4.jp takamasa.horibe@tier4.jp common/tier4_logging_level_configure_rviz_plugin/** kosuke.takeuchi@tier4.jp satoshi.ota@tier4.jp takamasa.horibe@tier4.jp common/tier4_perception_rviz_plugin/** yukihiro.saito@tier4.jp @@ -58,7 +57,7 @@ common/time_utils/** christopherj.ho@gmail.com shumpei.wakabayashi@tier4.jp tomo common/traffic_light_recognition_marker_publisher/** shumpei.wakabayashi@tier4.jp takeshi.miura@tier4.jp tomoya.kimura@tier4.jp common/traffic_light_utils/** mingyu.li@tier4.jp satoshi.ota@tier4.jp shunsuke.miura@tier4.jp common/tvm_utility/** ambroise.vincent@arm.com xinyu.wang@tier4.jp -control/autonomous_emergency_braking/** takamasa.horibe@tier4.jp tomoya.kimura@tier4.jp +control/autonomous_emergency_braking/** mamoru.sobue@tier4.jp takamasa.horibe@tier4.jp tomoya.kimura@tier4.jp control/control_performance_analysis/** berkay@leodrive.ai fumiya.watanabe@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomoya.kimura@tier4.jp control/control_validator/** kyoichi.sugahara@tier4.jp makoto.kurihara@tier4.jp mamoru.sobue@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp control/external_cmd_selector/** fumiya.watanabe@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomoya.kimura@tier4.jp @@ -67,7 +66,7 @@ control/lane_departure_checker/** kyoichi.sugahara@tier4.jp makoto.kurihara@tier control/mpc_lateral_controller/** takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp control/obstacle_collision_checker/** fumiya.watanabe@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomoya.kimura@tier4.jp control/operation_mode_transition_manager/** takamasa.horibe@tier4.jp tomoya.kimura@tier4.jp -control/pid_longitudinal_controller/** takamasa.horibe@tier4.jp 
takayuki.murooka@tier4.jp +control/pid_longitudinal_controller/** mamoru.sobue@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp control/predicted_path_checker/** berkay@leodrive.ai control/pure_pursuit/** takamasa.horibe@tier4.jp control/shift_decider/** takamasa.horibe@tier4.jp @@ -81,7 +80,7 @@ evaluator/planning_evaluator/** kyoichi.sugahara@tier4.jp maxime.clement@tier4.j evaluator/tier4_metrics_rviz_plugin/** kyoichi.sugahara@tier4.jp maxime.clement@tier4.jp satoshi.ota@tier4.jp launch/tier4_autoware_api_launch/** isamu.takagi@tier4.jp ryohsuke.mitsudome@tier4.jp launch/tier4_control_launch/** takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp -launch/tier4_localization_launch/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp +launch/tier4_localization_launch/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp launch/tier4_map_launch/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryohsuke.mitsudome@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp launch/tier4_perception_launch/** shunsuke.miura@tier4.jp yoshi.ri@tier4.jp yukihiro.saito@tier4.jp launch/tier4_planning_launch/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp kyoichi.sugahara@tier4.jp makoto.kurihara@tier4.jp mamoru.sobue@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zhe.shen@tier4.jp zulfaqar.azmi@tier4.jp @@ -98,6 +97,7 @@ localization/localization_error_monitor/** anh.nguyen.2@tier4.jp kento.yabuuchi. 
localization/localization_util/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp localization/ndt_scan_matcher/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp localization/pose2twist/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp +localization/pose_estimator_arbiter/** kento.yabuuchi.2@tier4.jp localization/pose_initializer/** anh.nguyen.2@tier4.jp isamu.takagi@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp localization/pose_instability_detector/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp localization/stop_filter/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp @@ -112,7 +112,7 @@ map/map_height_fitter/** anh.nguyen.2@tier4.jp isamu.takagi@tier4.jp kento.yabuu map/map_loader/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryohsuke.mitsudome@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp map/map_projection_loader/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp map/map_tf_generator/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp 
shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp -map/util/lanelet2_map_preprocessor/** ryohsuke.mitsudome@tier4.jp +map/util/lanelet2_map_preprocessor/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp masahiro.sakamoto@tier4.jp ryohsuke.mitsudome@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp perception/bytetrack/** manato.hirabayashi@tier4.jp perception/cluster_merger/** dai.nguyen@tier4.jp shunsuke.miura@tier4.jp yukihiro.saito@tier4.jp perception/compare_map_segmentation/** abrahammonrroy@yahoo.com dai.nguyen@tier4.jp yukihiro.saito@tier4.jp @@ -122,14 +122,12 @@ perception/detected_object_validation/** dai.nguyen@tier4.jp shunsuke.miura@tier perception/detection_by_tracker/** yoshi.ri@tier4.jp yukihiro.saito@tier4.jp perception/elevation_map_loader/** kosuke.takeuchi@tier4.jp taichi.higashide@tier4.jp perception/euclidean_cluster/** yukihiro.saito@tier4.jp -perception/front_vehicle_velocity_estimator/** satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp perception/ground_segmentation/** abrahammonrroy@yahoo.com dai.nguyen@tier4.jp shunsuke.miura@tier4.jp yukihiro.saito@tier4.jp -perception/heatmap_visualizer/** kotaro.uetake@tier4.jp -perception/image_projection_based_fusion/** dai.nguyen@tier4.jp kotaro.uetake@tier4.jp shunsuke.miura@tier4.jp tao.zhong@tier4.jp yoshi.ri@tier4.jp yukihiro.saito@tier4.jp +perception/image_projection_based_fusion/** dai.nguyen@tier4.jp koji.minoda@tier4.jp kotaro.uetake@tier4.jp shunsuke.miura@tier4.jp tao.zhong@tier4.jp yoshi.ri@tier4.jp yukihiro.saito@tier4.jp perception/lidar_apollo_instance_segmentation/** yukihiro.saito@tier4.jp perception/lidar_apollo_segmentation_tvm/** ambroise.vincent@arm.com xinyu.wang@tier4.jp perception/lidar_apollo_segmentation_tvm_nodes/** ambroise.vincent@arm.com xinyu.wang@tier4.jp -perception/lidar_centerpoint/** kenzo.lobos@tier4.jp +perception/lidar_centerpoint/** kenzo.lobos@tier4.jp koji.minoda@tier4.jp 
perception/lidar_centerpoint_tvm/** carl.liu@autocore.ai xinyu.wang@tier4.jp perception/map_based_prediction/** kotaro.uetake@tier4.jp kyoichi.sugahara@tier4.jp shunsuke.miura@tier4.jp takayuki.murooka@tier4.jp tomoya.kimura@tier4.jp yoshi.ri@tier4.jp perception/multi_object_tracker/** yoshi.ri@tier4.jp yukihiro.saito@tier4.jp @@ -146,7 +144,6 @@ perception/radar_tracks_msgs_converter/** satoshi.tanaka@tier4.jp shunsuke.miura perception/shape_estimation/** yoshi.ri@tier4.jp yukihiro.saito@tier4.jp perception/simple_object_merger/** satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp yoshi.ri@tier4.jp perception/tensorrt_classifier/** mingyu.li@tier4.jp -perception/tensorrt_yolo/** daisuke.nishimatsu@tier4.jp perception/tensorrt_yolox/** daisuke.nishimatsu@tier4.jp dan.umeda@tier4.jp manato.hirabayashi@tier4.jp perception/tracking_object_merger/** yoshi.ri@tier4.jp yukihiro.saito@tier4.jp perception/traffic_light_arbiter/** kenzo.lobos@tier4.jp shunsuke.miura@tier4.jp @@ -155,16 +152,16 @@ perception/traffic_light_fine_detector/** shunsuke.miura@tier4.jp tao.zhong@tier perception/traffic_light_map_based_detector/** yukihiro.saito@tier4.jp perception/traffic_light_multi_camera_fusion/** shunsuke.miura@tier4.jp tao.zhong@tier4.jp perception/traffic_light_occlusion_predictor/** shunsuke.miura@tier4.jp tao.zhong@tier4.jp -perception/traffic_light_ssd_fine_detector/** daisuke.nishimatsu@tier4.jp perception/traffic_light_visualization/** yukihiro.saito@tier4.jp planning/behavior_path_avoidance_by_lane_change_module/** fumiya.watanabe@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp -planning/behavior_path_avoidance_module/** fumiya.watanabe@tier4.jp kyoichi.sugahara@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp takamasa.horibe@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp +planning/behavior_path_avoidance_module/** fumiya.watanabe@tier4.jp kyoichi.sugahara@tier4.jp 
satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp takamasa.horibe@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp planning/behavior_path_dynamic_avoidance_module/** kosuke.takeuchi@tier4.jp satoshi.ota@tier4.jp takayuki.murooka@tier4.jp yuki.takagi@tier4.jp planning/behavior_path_external_request_lane_change_module/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp planning/behavior_path_goal_planner_module/** daniel.sanchez@tier4.jp kosuke.takeuchi@tier4.jp kyoichi.sugahara@tier4.jp mamoru.sobue@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp -planning/behavior_path_lane_change_module/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp +planning/behavior_path_lane_change_module/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp planning/behavior_path_planner/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp kyoichi.sugahara@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp planning/behavior_path_planner_common/** daniel.sanchez@tier4.jp fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp mamoru.sobue@tier4.jp maxime.clement@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp tomoya.kimura@tier4.jp zulfaqar.azmi@tier4.jp +planning/behavior_path_sampling_planner_module/** daniel.sanchez@tier4.jp maxime.clement@tier4.jp planning/behavior_path_side_shift_module/** fumiya.watanabe@tier4.jp kyoichi.sugahara@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp 
tomohito.ando@tier4.jp tomoya.kimura@tier4.jp planning/behavior_path_start_planner_module/** daniel.sanchez@tier4.jp kosuke.takeuchi@tier4.jp kyoichi.sugahara@tier4.jp mamoru.sobue@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp tomohito.ando@tier4.jp tomoya.kimura@tier4.jp planning/behavior_velocity_blind_spot_module/** mamoru.sobue@tier4.jp shumpei.wakabayashi@tier4.jp tomoya.kimura@tier4.jp @@ -194,10 +191,9 @@ planning/motion_velocity_smoother/** fumiya.watanabe@tier4.jp makoto.kurihara@ti planning/objects_of_interest_marker_interface/** fumiya.watanabe@tier4.jp kosuke.takeuchi@tier4.jp zulfaqar.azmi@tier4.jp planning/obstacle_avoidance_planner/** kosuke.takeuchi@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp planning/obstacle_cruise_planner/** kosuke.takeuchi@tier4.jp satoshi.ota@tier4.jp takayuki.murooka@tier4.jp yuki.takagi@tier4.jp -planning/obstacle_stop_planner/** berkay@leodrive.ai satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp tomoya.kimura@tier4.jp +planning/obstacle_stop_planner/** berkay@leodrive.ai bnk@leodrive.ai satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp tomoya.kimura@tier4.jp planning/obstacle_velocity_limiter/** maxime.clement@tier4.jp planning/path_smoother/** maxime.clement@tier4.jp takayuki.murooka@tier4.jp -planning/planning_debug_tools/** taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp planning/planning_test_utils/** kyoichi.sugahara@tier4.jp takamasa.horibe@tier4.jp planning/planning_topic_converter/** kosuke.takeuchi@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp planning/planning_validator/** kosuke.takeuchi@tier4.jp takamasa.horibe@tier4.jp @@ -211,12 +207,12 @@ planning/sampling_based_planner/sampler_common/** maxime.clement@tier4.jp planning/scenario_selector/** fumiya.watanabe@tier4.jp satoshi.ota@tier4.jp shumpei.wakabayashi@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp takayuki.murooka@tier4.jp 
tomoya.kimura@tier4.jp planning/static_centerline_optimizer/** kosuke.takeuchi@tier4.jp takayuki.murooka@tier4.jp planning/surround_obstacle_checker/** satoshi.ota@tier4.jp -sensing/gnss_poser/** koji.minoda@tier4.jp yamato.ando@tier4.jp +sensing/gnss_poser/** anh.nguyen.2@tier4.jp kento.yabuuchi.2@tier4.jp koji.minoda@tier4.jp masahiro.sakamoto@tier4.jp ryu.yamamoto@tier4.jp shintaro.sakoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp sensing/image_diagnostics/** dai.nguyen@tier4.jp sensing/image_transport_decompressor/** yukihiro.saito@tier4.jp sensing/imu_corrector/** koji.minoda@tier4.jp taiki.yamada@tier4.jp yamato.ando@tier4.jp sensing/livox/livox_tag_filter/** ryohsuke.mitsudome@tier4.jp -sensing/pointcloud_preprocessor/** abrahammonrroy@yahoo.com shunsuke.miura@tier4.jp yukihiro.saito@tier4.jp +sensing/pointcloud_preprocessor/** abrahammonrroy@yahoo.com kyoichi.sugahara@tier4.jp shunsuke.miura@tier4.jp yukihiro.saito@tier4.jp sensing/radar_scan_to_pointcloud2/** satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp taekjin.lee@tier4.jp yoshi.ri@tier4.jp sensing/radar_static_pointcloud_filter/** satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp taekjin.lee@tier4.jp yoshi.ri@tier4.jp sensing/radar_threshold_filter/** satoshi.tanaka@tier4.jp shunsuke.miura@tier4.jp taekjin.lee@tier4.jp yoshi.ri@tier4.jp @@ -237,15 +233,14 @@ system/diagnostic_graph_aggregator/** isamu.takagi@tier4.jp system/dummy_diag_publisher/** fumihito.ito@tier4.jp tetsuhiro.kawaguchi@tier4.jp system/dummy_infrastructure/** ryohsuke.mitsudome@tier4.jp system/duplicated_node_checker/** shumpei.wakabayashi@tier4.jp uken.ryu@tier4.jp -system/emergency_handler/** makoto.kurihara@tier4.jp +system/emergency_handler/** makoto.kurihara@tier4.jp ryuta.kambe@tier4.jp tetsuhiro.kawaguchi@tier4.jp system/mrm_comfortable_stop_operator/** makoto.kurihara@tier4.jp tomohito.ando@tier4.jp system/mrm_emergency_stop_operator/** makoto.kurihara@tier4.jp tomohito.ando@tier4.jp system/system_error_monitor/** 
fumihito.ito@tier4.jp system/system_monitor/** fumihito.ito@tier4.jp tetsuhiro.kawaguchi@tier4.jp system/topic_state_monitor/** ryohsuke.mitsudome@tier4.jp system/velodyne_monitor/** fumihito.ito@tier4.jp -tools/simulator_test/simulator_compatibility_test/** shpark@morai.ai shumpei.wakabayashi@tier4.jp tomoya.kimura@tier4.jp -vehicle/accel_brake_map_calibrator/accel_brake_map_calibrator/** taiki.tanaka@tier4.jp takeshi.miura@tier4.jp tomoya.kimura@tier4.jp +vehicle/accel_brake_map_calibrator/** taiki.tanaka@tier4.jp takeshi.miura@tier4.jp tomoya.kimura@tier4.jp vehicle/external_cmd_converter/** takamasa.horibe@tier4.jp vehicle/raw_vehicle_cmd_converter/** makoto.kurihara@tier4.jp taiki.tanaka@tier4.jp takamasa.horibe@tier4.jp vehicle/steer_offset_estimator/** taiki.tanaka@tier4.jp diff --git a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/CHANGELOG.rst b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/CHANGELOG.rst similarity index 100% rename from common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/CHANGELOG.rst rename to common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/CHANGELOG.rst diff --git a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/CMakeLists.txt b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/CMakeLists.txt similarity index 92% rename from common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/CMakeLists.txt rename to common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/CMakeLists.txt index 9e9a8f277fd53..e23a4e755cbc4 100644 --- a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/CMakeLists.txt +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.5) -project(rviz_2d_overlay_msgs) +project(autoware_overlay_msgs) if (NOT CMAKE_CXX_STANDARD) set(CMAKE_CXX_STANDARD 17) diff --git a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/msg/OverlayText.msg b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/msg/OverlayText.msg 
similarity index 100% rename from common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/msg/OverlayText.msg rename to common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/msg/OverlayText.msg diff --git a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/package.xml b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/package.xml similarity index 96% rename from common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/package.xml rename to common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/package.xml index 53396c64aa156..4881b126ffffb 100644 --- a/common/awf_vehicle_rviz_plugin/rviz_2d_overlay_msgs/package.xml +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_msgs/package.xml @@ -1,7 +1,7 @@ - rviz_2d_overlay_msgs + autoware_overlay_msgs 1.3.0 Messages describing 2D overlays for RVIZ, extracted/derived from the jsk_visualization package. Team Spatzenhirn diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/CMakeLists.txt b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/CMakeLists.txt similarity index 94% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/CMakeLists.txt rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/CMakeLists.txt index da67a6f63aeae..afb12bffeeaa7 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/CMakeLists.txt +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.8) -project(awf_2d_overlay_vehicle) +project(autoware_overlay_rviz_plugin) # find dependencies find_package(ament_cmake_auto REQUIRED) @@ -9,7 +9,7 @@ find_package(tier4_planning_msgs REQUIRED) find_package(autoware_perception_msgs REQUIRED) ament_auto_find_build_dependencies() -find_package(rviz_2d_overlay_msgs REQUIRED) +find_package(autoware_overlay_msgs REQUIRED) find_package(rviz_common REQUIRED) find_package(rviz_rendering REQUIRED) @@ -90,7 +90,7 @@ ament_target_dependencies( PUBLIC 
rviz_common rviz_rendering - rviz_2d_overlay_msgs + autoware_overlay_msgs autoware_auto_vehicle_msgs tier4_planning_msgs autoware_perception_msgs @@ -136,5 +136,5 @@ install( add_definitions(-DQT_NO_KEYWORDS) ament_package( - CONFIG_EXTRAS "awf_2d_overlay_vehicle-extras.cmake" + CONFIG_EXTRAS "autoware_overlay_rviz_plugin-extras.cmake" ) diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/LICENSE b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/LICENSE similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/LICENSE rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/LICENSE diff --git a/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/README.md b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/README.md new file mode 100644 index 0000000000000..0d0def1a46997 --- /dev/null +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/README.md @@ -0,0 +1,54 @@ +# autoware_overlay_rviz_plugin + +Plugin for displaying 2D overlays over the RViz2 3D scene. + +Based on the [jsk_visualization](https://github.com/jsk-ros-pkg/jsk_visualization) +package, under the 3-Clause BSD license. + +## Purpose + +This plugin provides a visual and easy-to-understand display of vehicle speed, turn signal, steering status and gears. 
+ +## Inputs / Outputs + +### Input + +| Name | Type | Description | +| ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------ | +| `/vehicle/status/velocity_status` | `autoware_auto_vehicle_msgs::msg::VelocityReport` | The topic is vehicle velocity | +| `/vehicle/status/turn_indicators_status` | `autoware_auto_vehicle_msgs::msg::TurnIndicatorsReport` | The topic is status of turn signal | +| `/vehicle/status/hazard_status` | `autoware_auto_vehicle_msgs::msg::HazardReport` | The topic is status of hazard | +| `/vehicle/status/steering_status` | `autoware_auto_vehicle_msgs::msg::SteeringReport` | The topic is status of steering | +| `/vehicle/status/gear_status` | `autoware_auto_vehicle_msgs::msg::GearReport` | The topic is status of gear | +| `/planning/scenario_planning/current_max_velocity` | `tier4_planning_msgs::msg::VelocityLimit` | The topic is velocity limit | +| `/perception/traffic_light_recognition/traffic_signals` | `autoware_perception_msgs::msg::TrafficSignalArray` | The topic is status of traffic light | + +## Parameter + +### Core Parameters + +#### SignalDisplay + +| Name | Type | Default Value | Description | +| ------------------------ | ------ | -------------------- | --------------------------------- | +| `property_width_` | int | 128 | Width of the plotter window [px] | +| `property_height_` | int | 128 | Height of the plotter window [px] | +| `property_left_` | int | 128 | Left of the plotter window [px] | +| `property_top_` | int | 128 | Top of the plotter window [px] | +| `property_signal_color_` | QColor | QColor(25, 255, 240) | Turn Signal color | + +## Assumptions / Known limits + +TBD. + +## Usage + +1. Start `rviz2` and click `Add` button under the `Displays` panel. + + ![select_add](./assets/images/select_add.png) + +2. Under `By display type` tab, select `autoware_overlay_rviz_plugin/SignalDisplay` and press OK. + +3. 
Enter the names of the topics if necessary. + + ![select_topic_name](./assets/images/select_topic_name.png) diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/LICENSE b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/LICENSE similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/LICENSE rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/LICENSE diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Bold.ttf b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Bold.ttf similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Bold.ttf rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Bold.ttf diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Light.ttf b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Light.ttf similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Light.ttf rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Light.ttf diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Medium.ttf b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Medium.ttf similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Medium.ttf rename to 
common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Medium.ttf diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Regular.ttf b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Regular.ttf similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-Regular.ttf rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-Regular.ttf diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-SemiBold.ttf b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-SemiBold.ttf similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/font/Quicksand/static/Quicksand-SemiBold.ttf rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/font/Quicksand/static/Quicksand-SemiBold.ttf diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/arrow.png b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/arrow.png similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/arrow.png rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/arrow.png diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_add.png b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/select_add.png similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_add.png rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/select_add.png diff --git 
a/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/select_topic_name.png b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/select_topic_name.png new file mode 100644 index 0000000000000..490552efc114f Binary files /dev/null and b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/select_topic_name.png differ diff --git a/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/traffic.png b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/traffic.png new file mode 100644 index 0000000000000..79715b0573e1a Binary files /dev/null and b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/traffic.png differ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/wheel.png b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/wheel.png similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/wheel.png rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/assets/images/wheel.png diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/awf_2d_overlay_vehicle-extras.cmake b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin-extras.cmake similarity index 100% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/awf_2d_overlay_vehicle-extras.cmake rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin-extras.cmake diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/gear_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/gear_display.hpp similarity index 94% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/gear_display.hpp rename to 
common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/gear_display.hpp index b289bfc541cee..3fe473d5d5123 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/gear_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/gear_display.hpp @@ -29,7 +29,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class GearDisplay @@ -44,6 +44,6 @@ class GearDisplay QColor gray = QColor(194, 194, 194); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // GEAR_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_text_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_text_display.hpp similarity index 93% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_text_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_text_display.hpp index 9b3ecccf6301c..ccea51b9a5349 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_text_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_text_display.hpp @@ -50,7 +50,7 @@ #ifndef OVERLAY_TEXT_DISPLAY_HPP_ #define OVERLAY_TEXT_DISPLAY_HPP_ -#include "rviz_2d_overlay_msgs/msg/overlay_text.hpp" +#include "autoware_overlay_msgs/msg/overlay_text.hpp" #ifndef Q_MOC_RUN #include "overlay_utils.hpp" @@ -69,10 +69,10 @@ #include #endif -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class OverlayTextDisplay -: public rviz_common::RosTopicDisplay +: public rviz_common::RosTopicDisplay { Q_OBJECT public: @@ -80,7 +80,7 @@ class OverlayTextDisplay virtual ~OverlayTextDisplay(); protected: - awf_2d_overlay_vehicle::OverlayObject::SharedPtr overlay_; + autoware_overlay_rviz_plugin::OverlayObject::SharedPtr overlay_; int 
texture_width_; int texture_height_; @@ -150,8 +150,8 @@ protected Q_SLOTS: void updateLineWidth(); private: - void processMessage(rviz_2d_overlay_msgs::msg::OverlayText::ConstSharedPtr msg) override; + void processMessage(autoware_overlay_msgs::msg::OverlayText::ConstSharedPtr msg) override; }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // OVERLAY_TEXT_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_utils.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_utils.hpp similarity index 90% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_utils.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_utils.hpp index 1270f15ca80ae..8581628ef0bce 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/overlay_utils.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/overlay_utils.hpp @@ -54,7 +54,7 @@ #include #include -#include "rviz_2d_overlay_msgs/msg/overlay_text.hpp" +#include "autoware_overlay_msgs/msg/overlay_text.hpp" #include #include @@ -70,7 +70,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class OverlayObject; @@ -90,15 +90,15 @@ class ScopedPixelBuffer }; enum class VerticalAlignment : uint8_t { - CENTER = rviz_2d_overlay_msgs::msg::OverlayText::CENTER, - TOP = rviz_2d_overlay_msgs::msg::OverlayText::TOP, - BOTTOM = rviz_2d_overlay_msgs::msg::OverlayText::BOTTOM, + CENTER = autoware_overlay_msgs::msg::OverlayText::CENTER, + TOP = autoware_overlay_msgs::msg::OverlayText::TOP, + BOTTOM = autoware_overlay_msgs::msg::OverlayText::BOTTOM, }; enum class HorizontalAlignment : uint8_t { - LEFT = rviz_2d_overlay_msgs::msg::OverlayText::LEFT, - RIGHT = rviz_2d_overlay_msgs::msg::OverlayText::RIGHT, - CENTER = rviz_2d_overlay_msgs::msg::OverlayText::CENTER + LEFT = 
autoware_overlay_msgs::msg::OverlayText::LEFT, + RIGHT = autoware_overlay_msgs::msg::OverlayText::RIGHT, + CENTER = autoware_overlay_msgs::msg::OverlayText::CENTER }; /** @@ -136,6 +136,6 @@ class OverlayObject Ogre::MaterialPtr panel_material_; Ogre::TexturePtr texture_; }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // OVERLAY_UTILS_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/signal_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/signal_display.hpp similarity index 93% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/signal_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/signal_display.hpp index aff475aba2f33..4fa39f2c5a903 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/signal_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/signal_display.hpp @@ -39,7 +39,7 @@ #include #endif -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class SignalDisplay : public rviz_common::Display { @@ -70,7 +70,7 @@ private Q_SLOTS: private: std::mutex mutex_; - awf_2d_overlay_vehicle::OverlayObject::SharedPtr overlay_; + autoware_overlay_rviz_plugin::OverlayObject::SharedPtr overlay_; rviz_common::properties::IntProperty * property_width_; rviz_common::properties::IntProperty * property_height_; rviz_common::properties::IntProperty * property_left_; @@ -84,7 +84,8 @@ private Q_SLOTS: std::unique_ptr traffic_topic_property_; std::unique_ptr speed_limit_topic_property_; - void drawBackground(QPainter & painter, const QRectF & backgroundRect); + void drawHorizontalRoundedRectangle(QPainter & painter, const QRectF & backgroundRect); + void drawVerticalRoundedRectangle(QPainter & painter, const QRectF & backgroundRect); void setupRosSubscriptions(); std::unique_ptr steering_wheel_display_; @@ -119,6 
+120,6 @@ private Q_SLOTS: const autoware_perception_msgs::msg::TrafficSignalArray::ConstSharedPtr msg); void drawWidget(QImage & hud); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // SIGNAL_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_display.hpp similarity index 94% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_display.hpp index 317e45917f927..0669028dba3b2 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_display.hpp @@ -29,7 +29,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class SpeedDisplay @@ -44,6 +44,6 @@ class SpeedDisplay QColor gray = QColor(194, 194, 194); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // SPEED_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_limit_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_limit_display.hpp similarity index 78% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_limit_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_limit_display.hpp index 00eae077ff2ac..b59f4acf380db 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/speed_limit_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/speed_limit_display.hpp @@ -23,13 +23,14 @@ #include #include +#include "autoware_auto_vehicle_msgs/msg/velocity_report.hpp" #include #include #include #include 
-namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class SpeedLimitDisplay @@ -38,12 +39,14 @@ class SpeedLimitDisplay SpeedLimitDisplay(); void drawSpeedLimitIndicator(QPainter & painter, const QRectF & backgroundRect); void updateSpeedLimitData(const tier4_planning_msgs::msg::VelocityLimit::ConstSharedPtr msg); + void updateSpeedData(const autoware_auto_vehicle_msgs::msg::VelocityReport::ConstSharedPtr & msg); private: - float current_limit; // Internal variable to store current gear + float current_limit; // Internal variable to store current gear + float current_speed_; // Internal variable to store current speed QColor gray = QColor(194, 194, 194); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // SPEED_LIMIT_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/steering_wheel_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/steering_wheel_display.hpp similarity index 94% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/steering_wheel_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/steering_wheel_display.hpp index a8291064c3807..121ee9a0a4d84 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/steering_wheel_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/steering_wheel_display.hpp @@ -29,7 +29,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class SteeringWheelDisplay @@ -49,6 +49,6 @@ class SteeringWheelDisplay QImage coloredImage(const QImage & source, const QColor & color); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // STEERING_WHEEL_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/traffic_display.hpp 
b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/traffic_display.hpp similarity index 84% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/traffic_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/traffic_display.hpp index bf776d27dfa94..20410a78edc44 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/traffic_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/traffic_display.hpp @@ -31,7 +31,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { class TrafficDisplay @@ -45,18 +45,15 @@ class TrafficDisplay private: QImage traffic_light_image_; - // yellow #CFC353 - QColor yellow = QColor(207, 195, 83); - // red #CF5353 - QColor red = QColor(207, 83, 83); - // green #53CF5F - QColor green = QColor(83, 207, 95); - // gray #C2C2C2 - QColor gray = QColor(194, 194, 194); + + const QColor tl_red_; + const QColor tl_yellow_; + const QColor tl_green_; + const QColor tl_gray_; QImage coloredImage(const QImage & source, const QColor & color); }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // TRAFFIC_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/turn_signals_display.hpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/turn_signals_display.hpp similarity index 94% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/turn_signals_display.hpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/turn_signals_display.hpp index cd659883c9ca0..87f141353d5b2 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/include/turn_signals_display.hpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/include/turn_signals_display.hpp @@ -32,7 +32,7 @@ #include -namespace awf_2d_overlay_vehicle 
+namespace autoware_overlay_rviz_plugin { class TurnSignalsDisplay @@ -47,7 +47,7 @@ class TurnSignalsDisplay private: QImage arrowImage; - QColor gray = QColor(194, 194, 194); + QColor gray = QColor(46, 46, 46); int current_turn_signal_; // Internal variable to store turn signal state int current_hazard_lights_; // Internal variable to store hazard lights state @@ -58,6 +58,6 @@ class TurnSignalsDisplay const std::chrono::milliseconds blink_interval_{500}; // Blink interval in milliseconds }; -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #endif // TURN_SIGNALS_DISPLAY_HPP_ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/package.xml b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/package.xml similarity index 91% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/package.xml rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/package.xml index b19b384d020b6..cc0ada00fa41b 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/package.xml +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/package.xml @@ -1,7 +1,7 @@ - awf_2d_overlay_vehicle + autoware_overlay_rviz_plugin 0.0.1 RViz2 plugin for 2D overlays in the 3D view. Mainly a port of the JSK overlay plugin @@ -12,9 +12,9 @@ BSD-3-Clause autoware_auto_vehicle_msgs + autoware_overlay_msgs autoware_perception_msgs boost - rviz_2d_overlay_msgs rviz_common rviz_ogre_vendor rviz_rendering diff --git a/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/plugins_description.xml b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/plugins_description.xml new file mode 100644 index 0000000000000..05c33f2fd10a5 --- /dev/null +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/plugins_description.xml @@ -0,0 +1,5 @@ + + + Signal overlay plugin for the 3D view. 
+ + diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/gear_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/gear_display.cpp similarity index 92% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/gear_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/gear_display.cpp index 153e106f04264..8f05ce6fdce1c 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/gear_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/gear_display.cpp @@ -31,12 +31,13 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { GearDisplay::GearDisplay() : current_gear_(0) { - std::string package_path = ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string font_path = package_path + "/assets/font/Quicksand/static/Quicksand-Regular.ttf"; std::string font_path2 = package_path + "/assets/font/Quicksand/static/Quicksand-Bold.ttf"; int fontId = QFontDatabase::addApplicationFont( @@ -91,8 +92,8 @@ void GearDisplay::drawGearIndicator(QPainter & painter, const QRectF & backgroun int gearY = backgroundRect.height() - gearBoxSize - 20; QRect gearRect(gearX, gearY, gearBoxSize, gearBoxSize); painter.setBrush(QColor(0, 0, 0, 0)); - painter.drawRoundedRect(gearRect, 5, 5); + painter.drawRoundedRect(gearRect, 10, 10); painter.drawText(gearRect, Qt::AlignCenter, QString::fromStdString(gearString)); } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_text_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_text_display.cpp similarity index 93% rename from 
common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_text_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_text_display.cpp index 053d0f6e981c0..b853e14a5858d 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_text_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_text_display.cpp @@ -67,7 +67,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { OverlayTextDisplay::OverlayTextDisplay() : texture_width_(0), @@ -105,15 +105,15 @@ OverlayTextDisplay::OverlayTextDisplay() hor_alignment_property_ = new rviz_common::properties::EnumProperty( "hor_alignment", "left", "horizontal alignment of the overlay", this, SLOT(updateHorizontalAlignment())); - hor_alignment_property_->addOption("left", rviz_2d_overlay_msgs::msg::OverlayText::LEFT); - hor_alignment_property_->addOption("center", rviz_2d_overlay_msgs::msg::OverlayText::CENTER); - hor_alignment_property_->addOption("right", rviz_2d_overlay_msgs::msg::OverlayText::RIGHT); + hor_alignment_property_->addOption("left", autoware_overlay_msgs::msg::OverlayText::LEFT); + hor_alignment_property_->addOption("center", autoware_overlay_msgs::msg::OverlayText::CENTER); + hor_alignment_property_->addOption("right", autoware_overlay_msgs::msg::OverlayText::RIGHT); ver_alignment_property_ = new rviz_common::properties::EnumProperty( "ver_alignment", "top", "vertical alignment of the overlay", this, SLOT(updateVerticalAlignment())); - ver_alignment_property_->addOption("top", rviz_2d_overlay_msgs::msg::OverlayText::TOP); - ver_alignment_property_->addOption("center", rviz_2d_overlay_msgs::msg::OverlayText::CENTER); - ver_alignment_property_->addOption("bottom", rviz_2d_overlay_msgs::msg::OverlayText::BOTTOM); + ver_alignment_property_->addOption("top", autoware_overlay_msgs::msg::OverlayText::TOP); + ver_alignment_property_->addOption("center", 
autoware_overlay_msgs::msg::OverlayText::CENTER); + ver_alignment_property_->addOption("bottom", autoware_overlay_msgs::msg::OverlayText::BOTTOM); width_property_ = new rviz_common::properties::IntProperty( "width", 128, "width position", this, SLOT(updateWidth())); width_property_->setMin(0); @@ -212,7 +212,7 @@ void OverlayTextDisplay::update(float /*wall_dt*/, float /*ros_dt*/) overlay_->updateTextureSize(texture_width_, texture_height_); { - awf_2d_overlay_vehicle::ScopedPixelBuffer buffer = overlay_->getBuffer(); + autoware_overlay_rviz_plugin::ScopedPixelBuffer buffer = overlay_->getBuffer(); QImage Hud = buffer.getQImage(*overlay_, bg_color_); QPainter painter(&Hud); painter.setRenderHint(QPainter::Antialiasing, true); @@ -288,7 +288,7 @@ void OverlayTextDisplay::reset() } } -void OverlayTextDisplay::processMessage(rviz_2d_overlay_msgs::msg::OverlayText::ConstSharedPtr msg) +void OverlayTextDisplay::processMessage(autoware_overlay_msgs::msg::OverlayText::ConstSharedPtr msg) { if (!isEnabled()) { return; @@ -297,13 +297,13 @@ void OverlayTextDisplay::processMessage(rviz_2d_overlay_msgs::msg::OverlayText:: static int count = 0; std::stringstream ss; ss << "OverlayTextDisplayObject" << count++; - overlay_.reset(new awf_2d_overlay_vehicle::OverlayObject(ss.str())); + overlay_.reset(new autoware_overlay_rviz_plugin::OverlayObject(ss.str())); overlay_->show(); } if (overlay_) { - if (msg->action == rviz_2d_overlay_msgs::msg::OverlayText::DELETE) { + if (msg->action == autoware_overlay_msgs::msg::OverlayText::DELETE) { overlay_->hide(); - } else if (msg->action == rviz_2d_overlay_msgs::msg::OverlayText::ADD) { + } else if (msg->action == autoware_overlay_msgs::msg::OverlayText::ADD) { overlay_->show(); } } @@ -550,7 +550,7 @@ void OverlayTextDisplay::updateLineWidth() } } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #include -PLUGINLIB_EXPORT_CLASS(awf_2d_overlay_vehicle::OverlayTextDisplay, rviz_common::Display) 
+PLUGINLIB_EXPORT_CLASS(autoware_overlay_rviz_plugin::OverlayTextDisplay, rviz_common::Display) diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_utils.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_utils.cpp similarity index 99% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_utils.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_utils.cpp index e65a74415fb6a..d6f061e724369 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/overlay_utils.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/overlay_utils.cpp @@ -52,7 +52,7 @@ #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { ScopedPixelBuffer::ScopedPixelBuffer(Ogre::HardwarePixelBufferSharedPtr pixel_buffer) : pixel_buffer_(pixel_buffer) @@ -264,4 +264,4 @@ unsigned int OverlayObject::getTextureHeight() const return 0; } } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/signal_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/signal_display.cpp similarity index 92% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/signal_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/signal_display.cpp index f2a82dd386b37..b2995327eb472 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/signal_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/signal_display.cpp @@ -33,7 +33,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { SignalDisplay::SignalDisplay() @@ -47,7 +47,7 @@ SignalDisplay::SignalDisplay() property_top_ = new rviz_common::properties::IntProperty( "Top", 10, "Top position of the overlay", this, 
SLOT(updateOverlayPosition())); property_signal_color_ = new rviz_common::properties::ColorProperty( - "Signal Color", QColor(94, 130, 255), "Color of the signal arrows", this, + "Signal Color", QColor(QString("#00E678")), "Color of the signal arrows", this, SLOT(updateOverlayColor())); // Initialize the component displays @@ -68,7 +68,7 @@ void SignalDisplay::onInitialize() static int count = 0; std::stringstream ss; ss << "SignalDisplayObject" << count++; - overlay_.reset(new awf_2d_overlay_vehicle::OverlayObject(ss.str())); + overlay_.reset(new autoware_overlay_rviz_plugin::OverlayObject(ss.str())); overlay_->show(); updateOverlaySize(); updateOverlayPosition(); @@ -206,7 +206,7 @@ void SignalDisplay::update(float /* wall_dt */, float /* ros_dt */) if (!overlay_) { return; } - awf_2d_overlay_vehicle::ScopedPixelBuffer buffer = overlay_->getBuffer(); + autoware_overlay_rviz_plugin::ScopedPixelBuffer buffer = overlay_->getBuffer(); QImage hud = buffer.getQImage(*overlay_); hud.fill(Qt::transparent); drawWidget(hud); @@ -297,6 +297,7 @@ void SignalDisplay::updateSpeedData( if (speed_display_) { speed_display_->updateSpeedData(msg); + speed_limit_display_->updateSpeedData(msg); queueRender(); } } @@ -324,7 +325,7 @@ void SignalDisplay::drawWidget(QImage & hud) painter.setRenderHint(QPainter::Antialiasing, true); QRectF backgroundRect(0, 0, 322, hud.height()); - drawBackground(painter, backgroundRect); + drawHorizontalRoundedRectangle(painter, backgroundRect); // Draw components if (steering_wheel_display_) { @@ -342,9 +343,9 @@ void SignalDisplay::drawWidget(QImage & hud) // a 27px space between the two halves of the HUD - QRectF smallerBackgroundRect(349, 0, 168, hud.height() / 2); + QRectF smallerBackgroundRect(340, 0, 190.0 / 2, hud.height()); - drawBackground(painter, smallerBackgroundRect); + drawVerticalRoundedRectangle(painter, smallerBackgroundRect); if (traffic_display_) { traffic_display_->drawTrafficLightIndicator(painter, smallerBackgroundRect); @@ 
-357,13 +358,33 @@ void SignalDisplay::drawWidget(QImage & hud) painter.end(); } -void SignalDisplay::drawBackground(QPainter & painter, const QRectF & backgroundRect) +void SignalDisplay::drawHorizontalRoundedRectangle( + QPainter & painter, const QRectF & backgroundRect) { - painter.setBrush(QColor(0, 0, 0, 255 * 0.2)); // Black background with opacity + painter.setRenderHint(QPainter::Antialiasing, true); + QColor colorFromHSV; + colorFromHSV.setHsv(0, 0, 0); // Hue, Saturation, Value + colorFromHSV.setAlphaF(0.65); // Transparency + + painter.setBrush(colorFromHSV); + painter.setPen(Qt::NoPen); painter.drawRoundedRect( backgroundRect, backgroundRect.height() / 2, backgroundRect.height() / 2); // Circular ends } +void SignalDisplay::drawVerticalRoundedRectangle(QPainter & painter, const QRectF & backgroundRect) +{ + painter.setRenderHint(QPainter::Antialiasing, true); + QColor colorFromHSV; + colorFromHSV.setHsv(0, 0, 0); // Hue, Saturation, Value + colorFromHSV.setAlphaF(0.65); // Transparency + + painter.setBrush(colorFromHSV); + + painter.setPen(Qt::NoPen); + painter.drawRoundedRect( + backgroundRect, backgroundRect.width() / 2, backgroundRect.width() / 2); // Circular ends +} void SignalDisplay::reset() { @@ -495,7 +516,7 @@ void SignalDisplay::topic_updated_traffic() }); } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin #include -PLUGINLIB_EXPORT_CLASS(awf_2d_overlay_vehicle::SignalDisplay, rviz_common::Display) +PLUGINLIB_EXPORT_CLASS(autoware_overlay_rviz_plugin::SignalDisplay, rviz_common::Display) diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_display.cpp similarity index 95% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_display.cpp index 8212758c09a9f..b1df14956d16c 100644 
--- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_display.cpp @@ -31,12 +31,13 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { SpeedDisplay::SpeedDisplay() : current_speed_(0.0) { - std::string package_path = ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string font_path = package_path + "/assets/font/Quicksand/static/Quicksand-Regular.ttf"; std::string font_path2 = package_path + "/assets/font/Quicksand/static/Quicksand-Bold.ttf"; int fontId = QFontDatabase::addApplicationFont( @@ -106,4 +107,4 @@ void SpeedDisplay::drawSpeedDisplay(QPainter & painter, const QRectF & backgroun painter.drawText(unitPos, speedUnit); } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_limit_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_limit_display.cpp similarity index 54% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_limit_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_limit_display.cpp index fcc1df998798b..6913ef6a48ecd 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/speed_limit_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/speed_limit_display.cpp @@ -31,12 +31,13 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { -SpeedLimitDisplay::SpeedLimitDisplay() : current_limit(0.0) +SpeedLimitDisplay::SpeedLimitDisplay() : current_limit(0.0), current_speed_(0.0) { - std::string package_path = 
ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string font_path = package_path + "/assets/font/Quicksand/static/Quicksand-Regular.ttf"; std::string font_path2 = package_path + "/assets/font/Quicksand/static/Quicksand-Bold.ttf"; int fontId = QFontDatabase::addApplicationFont( @@ -54,45 +55,81 @@ void SpeedLimitDisplay::updateSpeedLimitData( current_limit = msg->max_velocity; } +void SpeedLimitDisplay::updateSpeedData( + const autoware_auto_vehicle_msgs::msg::VelocityReport::ConstSharedPtr & msg) +{ + try { + float speed = msg->longitudinal_velocity; + current_speed_ = speed; + } catch (const std::exception & e) { + // Log the error + std::cerr << "Error in processMessage: " << e.what() << std::endl; + } +} + void SpeedLimitDisplay::drawSpeedLimitIndicator(QPainter & painter, const QRectF & backgroundRect) { // Enable Antialiasing for smoother drawing painter.setRenderHint(QPainter::Antialiasing, true); painter.setRenderHint(QPainter::SmoothPixmapTransform, true); - // #C2C2C2 - painter.setPen(QPen(gray, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin)); - painter.setBrush(QBrush(gray, Qt::SolidPattern)); + const double color_s_min = 0.4; + const double color_s_max = 0.8; + QColor colorMin; + colorMin.setHsvF(0.0, color_s_min, 1.0); + QColor colorMax; + colorMax.setHsvF(0.0, color_s_max, 1.0); + + QColor borderColor = colorMin; + if (current_limit > 0.0) { + double speed_to_limit_ratio = current_speed_ / current_limit; + const double speed_to_limit_ratio_min = 0.6; + const double speed_to_limit_ratio_max = 0.9; + + if (speed_to_limit_ratio >= speed_to_limit_ratio_max) { + borderColor = colorMax; + } else if (speed_to_limit_ratio > speed_to_limit_ratio_min) { + double interpolation_factor = (speed_to_limit_ratio - speed_to_limit_ratio_min) / + (speed_to_limit_ratio_max - speed_to_limit_ratio_min); + // Interpolate between colorMin 
and colorMax + double saturation = color_s_min + (color_s_max - color_s_min) * interpolation_factor; + + borderColor.setHsvF(0.0, saturation, 1.0); + } + } // Define the area for the outer circle QRectF outerCircleRect = backgroundRect; - outerCircleRect.setWidth(backgroundRect.width() / 2 - 20); - outerCircleRect.setHeight(backgroundRect.height() - 20); - outerCircleRect.moveTopLeft(QPointF(backgroundRect.left() + 10, backgroundRect.top() + 10)); + outerCircleRect.setWidth(backgroundRect.width() - 30); + outerCircleRect.setHeight(backgroundRect.width() - 30); + outerCircleRect.moveTopLeft(QPointF(backgroundRect.left() + 15, backgroundRect.top() + 10)); + + // Now use borderColor for drawing + painter.setPen(QPen(borderColor, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin)); + painter.setBrush(borderColor); + // Draw the outer circle for the speed limit indicator + painter.drawEllipse(outerCircleRect); // Define the area for the inner circle QRectF innerCircleRect = outerCircleRect; - innerCircleRect.setWidth(outerCircleRect.width() / 1.375); - innerCircleRect.setHeight(outerCircleRect.height() / 1.375); + innerCircleRect.setWidth(outerCircleRect.width() / 1.25); + innerCircleRect.setHeight(outerCircleRect.height() / 1.25); innerCircleRect.moveCenter(outerCircleRect.center()); - // Draw the outer circle - painter.drawEllipse(outerCircleRect); + painter.setRenderHint(QPainter::Antialiasing, true); + QColor colorFromHSV; + colorFromHSV.setHsv(0, 0, 0); // Hue, Saturation, Value - // Change the composition mode and draw the inner circle - painter.setCompositionMode(QPainter::CompositionMode_Clear); + painter.setBrush(colorFromHSV); painter.drawEllipse(innerCircleRect); - // Reset the composition mode - painter.setCompositionMode(QPainter::CompositionMode_SourceOver); - int current_limit_int = std::round(current_limit * 3.6); // Define the text to be drawn QString text = QString::number(current_limit_int); // Set the font and color for the text - QFont font = 
QFont("Quicksand", 14, QFont::Bold); + QFont font = QFont("Quicksand", 24, QFont::Bold); painter.setFont(font); // #C2C2C2 @@ -102,4 +139,4 @@ void SpeedLimitDisplay::drawSpeedLimitIndicator(QPainter & painter, const QRectF painter.drawText(innerCircleRect, Qt::AlignCenter, text); } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/steering_wheel_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/steering_wheel_display.cpp similarity index 95% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/steering_wheel_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/steering_wheel_display.cpp index b4da8ff5f8ffb..5882d7780dc92 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/steering_wheel_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/steering_wheel_display.cpp @@ -31,13 +31,14 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { SteeringWheelDisplay::SteeringWheelDisplay() { // Load the Quicksand font - std::string package_path = ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string font_path = package_path + "/assets/font/Quicksand/static/Quicksand-Regular.ttf"; std::string font_path2 = package_path + "/assets/font/Quicksand/static/Quicksand-Bold.ttf"; int fontId = QFontDatabase::addApplicationFont( @@ -120,4 +121,4 @@ QImage SteeringWheelDisplay::coloredImage(const QImage & source, const QColor & return result; } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/traffic_display.cpp 
b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/traffic_display.cpp similarity index 54% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/traffic_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/traffic_display.cpp index 233da1f36b4a7..4fea8f78b80da 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/traffic_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/traffic_display.cpp @@ -31,13 +31,18 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { TrafficDisplay::TrafficDisplay() +: tl_red_(QString("#cc3d3d")), + tl_yellow_(QString("#ccb43d")), + tl_green_(QString("#3dcc55")), + tl_gray_(QString("#4f4f4f")) { // Load the traffic light image - std::string package_path = ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string image_path = package_path + "/assets/images/traffic.png"; traffic_light_image_.load(image_path.c_str()); } @@ -54,50 +59,60 @@ void TrafficDisplay::drawTrafficLightIndicator(QPainter & painter, const QRectF painter.setRenderHint(QPainter::Antialiasing, true); painter.setRenderHint(QPainter::SmoothPixmapTransform, true); + painter.setBrush(QBrush(tl_gray_, Qt::SolidPattern)); + painter.setPen(Qt::NoPen); // Define the area for the circle (background) QRectF circleRect = backgroundRect; - circleRect.setWidth(backgroundRect.width() / 2 - 20); - circleRect.setHeight(backgroundRect.height() - 20); - circleRect.moveTopRight(QPointF(backgroundRect.right() - 10, backgroundRect.top() + 10)); - - painter.setBrush(QBrush(gray)); - painter.drawEllipse(circleRect.center(), 30, 30); - - // Define the area for the traffic light image (should be smaller or positioned within the circle) - QRectF imageRect = - circleRect.adjusted(15, 15, -15, -15); 
// Adjusting the rectangle to make the image smaller - - QImage scaled_traffic_image = traffic_light_image_.scaled( - imageRect.size().toSize(), Qt::KeepAspectRatio, Qt::SmoothTransformation); - - if (current_traffic_.signals.size() > 0) { + circleRect.setWidth(backgroundRect.width() - 30); + circleRect.setHeight(backgroundRect.width() - 30); + circleRect.moveTopRight(QPointF( + backgroundRect.left() + circleRect.width() + 15, + backgroundRect.top() + circleRect.height() + 30)); + painter.drawEllipse(circleRect); + + if (!current_traffic_.signals.empty()) { switch (current_traffic_.signals[0].elements[0].color) { case 1: - painter.setBrush(QBrush(red)); - painter.drawEllipse(circleRect.center(), 30, 30); + painter.setBrush(QBrush(tl_red_)); + painter.drawEllipse(circleRect); break; case 2: - painter.setBrush(QBrush(yellow)); - painter.drawEllipse(circleRect.center(), 30, 30); + painter.setBrush(QBrush(tl_yellow_)); + painter.drawEllipse(circleRect); break; case 3: - painter.setBrush(QBrush(green)); - painter.drawEllipse(circleRect.center(), 30, 30); + painter.setBrush(QBrush(tl_green_)); + painter.drawEllipse(circleRect); break; case 4: - painter.setBrush(QBrush(gray)); - painter.drawEllipse(circleRect.center(), 30, 30); + painter.setBrush(tl_gray_); + painter.drawEllipse(circleRect); break; default: - painter.setBrush(QBrush(gray)); - painter.drawEllipse(circleRect.center(), 30, 30); + painter.setBrush(tl_gray_); + painter.drawEllipse(circleRect); break; } } - // make the image thicker - painter.setPen(QPen(Qt::black, 2, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin)); - painter.drawImage(imageRect, scaled_traffic_image); + // Scaling factor (e.g., 1.5 for 150% size) + float scaleFactor = 1.00; + + // Calculate the scaled size + QSize scaledSize = traffic_light_image_.size() * scaleFactor; + + // Ensure the scaled image is still within the circle bounds or adjust scaleFactor accordingly + + // Calculate the centered rectangle for the scaled image + QRectF 
scaledImageRect(0, 0, scaledSize.width(), scaledSize.height()); + scaledImageRect.moveCenter(circleRect.center()); + + // Scale the image to the new size + QImage scaledTrafficImage = + traffic_light_image_.scaled(scaledSize, Qt::KeepAspectRatio, Qt::SmoothTransformation); + + // Draw the scaled and centered image + painter.drawImage(scaledImageRect.topLeft(), scaledTrafficImage); } QImage TrafficDisplay::coloredImage(const QImage & source, const QColor & color) @@ -110,4 +125,4 @@ QImage TrafficDisplay::coloredImage(const QImage & source, const QColor & color) return result; } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/turn_signals_display.cpp b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/turn_signals_display.cpp similarity index 95% rename from common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/turn_signals_display.cpp rename to common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/turn_signals_display.cpp index a9b780cb4eab2..b6d37062bf08b 100644 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/src/turn_signals_display.cpp +++ b/common/autoware_overlay_rviz_plugin/autoware_overlay_rviz_plugin/src/turn_signals_display.cpp @@ -31,7 +31,7 @@ #include #include -namespace awf_2d_overlay_vehicle +namespace autoware_overlay_rviz_plugin { TurnSignalsDisplay::TurnSignalsDisplay() : current_turn_signal_(0) @@ -39,7 +39,8 @@ TurnSignalsDisplay::TurnSignalsDisplay() : current_turn_signal_(0) last_toggle_time_ = std::chrono::steady_clock::now(); // Load the arrow image - std::string package_path = ament_index_cpp::get_package_share_directory("awf_2d_overlay_vehicle"); + std::string package_path = + ament_index_cpp::get_package_share_directory("autoware_overlay_rviz_plugin"); std::string image_path = package_path + "/assets/images/arrow.png"; arrowImage.load(image_path.c_str()); } @@ -119,4 +120,4 @@ QImage 
TurnSignalsDisplay::coloredImage(const QImage & source, const QColor & co return result; } -} // namespace awf_2d_overlay_vehicle +} // namespace autoware_overlay_rviz_plugin diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/README.md b/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/README.md deleted file mode 100644 index 6728f26878e10..0000000000000 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# awf_2d_overlay_vehicle - -Plugin for displaying 2D overlays over the RViz2 3D scene. - -Based on the [jsk_visualization](https://github.com/jsk-ros-pkg/jsk_visualization) -package, under the 3-Clause BSD license. - -## Purpose - -This plugin provides a visual and easy-to-understand display of vehicle speed, turn signal, steering status and gears. - -## Inputs / Outputs - -### Input - -| Name | Type | Description | -| ---------------------------------------- | ------------------------------------------------------- | ---------------------------------- | -| `/vehicle/status/velocity_status` | `autoware_auto_vehicle_msgs::msg::VelocityReport` | The topic is vehicle twist | -| `/vehicle/status/turn_indicators_status` | `autoware_auto_vehicle_msgs::msg::TurnIndicatorsReport` | The topic is status of turn signal | -| `/vehicle/status/hazard_status` | `autoware_auto_vehicle_msgs::msg::HazardReport` | The topic is status of hazard | -| `/vehicle/status/steering_status` | `autoware_auto_vehicle_msgs::msg::SteeringReport` | The topic is status of steering | -| `/vehicle/status/gear_status` | `autoware_auto_vehicle_msgs::msg::GearReport` | The topic is status of gear | - -## Parameter - -### Core Parameters - -#### SignalDisplay - -| Name | Type | Default Value | Description | -| ------------------------ | ------ | -------------------- | --------------------------------- | -| `property_width_` | int | 128 | Width of the plotter window [px] | -| `property_height_` | int | 128 | Height of the plotter 
window [px] | -| `property_left_` | int | 128 | Left of the plotter window [px] | -| `property_top_` | int | 128 | Top of the plotter window [px] | -| `property_signal_color_` | QColor | QColor(25, 255, 240) | Turn Signal color | - -## Assumptions / Known limits - -TBD. - -## Usage - -1. Start rviz and select Add under the Displays panel. - - ![select_add](./assets/images/select_add.png) - -2. Select any one of the tier4_vehicle_rviz_plugin and press OK. - - ![select_vehicle_plugin](./assets/images/select_vehicle_plugin.png) - -3. Enter the name of the topic where you want to view the status. - - ![select_topic_name](./assets/images/select_topic_name.png) diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_topic_name.png b/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_topic_name.png deleted file mode 100644 index 5466bcf0050df..0000000000000 Binary files a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_topic_name.png and /dev/null differ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_vehicle_plugin.png b/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_vehicle_plugin.png deleted file mode 100644 index 863f63d728616..0000000000000 Binary files a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/select_vehicle_plugin.png and /dev/null differ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/traffic.png b/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/traffic.png deleted file mode 100644 index 960985a3da5de..0000000000000 Binary files a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/assets/images/traffic.png and /dev/null differ diff --git a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/plugins_description.xml b/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/plugins_description.xml deleted file mode 100644 index 
8a5af0abcf0dd..0000000000000 --- a/common/awf_vehicle_rviz_plugin/awf_2d_overlay_vehicle/plugins_description.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - Signal overlay plugin for the 3D view. - - diff --git a/common/tier4_adapi_rviz_plugin/README.md b/common/tier4_adapi_rviz_plugin/README.md index 41ef7c6b6243b..4056130776178 100644 --- a/common/tier4_adapi_rviz_plugin/README.md +++ b/common/tier4_adapi_rviz_plugin/README.md @@ -9,3 +9,21 @@ Enable or disable of allow_goal_modification option can be set with the check bo Push the mode button in the waypoint to enter waypoint mode. In this mode, the pose is added to waypoints. Press the apply button to set the route using the saved waypoints (the last one is a goal). Reset the saved waypoints with the reset button. + +## Material Design Icons + +This project uses [Material Design Icons](https://developers.google.com/fonts/docs/material_symbols) by Google. These icons are used under the terms of the Apache License, Version 2.0. + +Material Design Icons are a collection of symbols provided by Google that are used to enhance the user interface of applications, websites, and other digital products. + +### License + +The Material Design Icons are licensed under the Apache License, Version 2.0. You may obtain a copy of the License at: + + + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +### Acknowledgments + +We would like to express our gratitude to Google for making these icons available to the community, helping developers and designers enhance the visual appeal and user experience of their projects. 
diff --git a/common/tier4_adapi_rviz_plugin/icons/classes/RouteTool.png b/common/tier4_adapi_rviz_plugin/icons/classes/RouteTool.png index 6a67573717ae1..fad4466a99326 100644 Binary files a/common/tier4_adapi_rviz_plugin/icons/classes/RouteTool.png and b/common/tier4_adapi_rviz_plugin/icons/classes/RouteTool.png differ diff --git a/common/tier4_adapi_rviz_plugin/src/route_panel.cpp b/common/tier4_adapi_rviz_plugin/src/route_panel.cpp index 3e99761fd9fb3..5bdee028bf4b3 100644 --- a/common/tier4_adapi_rviz_plugin/src/route_panel.cpp +++ b/common/tier4_adapi_rviz_plugin/src/route_panel.cpp @@ -14,11 +14,13 @@ #include "route_panel.hpp" +#include #include #include #include #include +#include namespace tier4_adapi_rviz_plugins { @@ -28,6 +30,11 @@ RoutePanel::RoutePanel(QWidget * parent) : rviz_common::Panel(parent) waypoints_mode_ = new QPushButton("mode"); waypoints_reset_ = new QPushButton("reset"); waypoints_apply_ = new QPushButton("apply"); + adapi_clear_ = new QPushButton("clear"); + adapi_set_ = new QPushButton("set"); + adapi_change_ = new QPushButton("change"); + adapi_response_ = new QLabel("the response will be displayed here"); + adapi_auto_clear_ = new QCheckBox("auto clear"); allow_goal_modification_ = new QCheckBox("allow goal modification"); waypoints_mode_->setCheckable(true); @@ -37,6 +44,22 @@ RoutePanel::RoutePanel(QWidget * parent) : rviz_common::Panel(parent) connect(waypoints_reset_, &QPushButton::clicked, this, &RoutePanel::onWaypointsReset); connect(waypoints_apply_, &QPushButton::clicked, this, &RoutePanel::onWaypointsApply); + const auto buttons = new QButtonGroup(this); + buttons->addButton(adapi_set_); + buttons->addButton(adapi_change_); + buttons->setExclusive(true); + adapi_set_->setCheckable(true); + adapi_change_->setCheckable(true); + adapi_response_->setAlignment(Qt::AlignCenter); + + connect(adapi_clear_, &QPushButton::clicked, this, &RoutePanel::clearRoute); + connect(adapi_set_, &QPushButton::clicked, [this] { adapi_mode_ = 
AdapiMode::Set; }); + connect(adapi_change_, &QPushButton::clicked, [this] { adapi_mode_ = AdapiMode::Change; }); + + adapi_auto_clear_->setChecked(true); + adapi_set_->setChecked(true); + adapi_mode_ = AdapiMode::Set; + const auto layout = new QVBoxLayout(); setLayout(layout); @@ -52,6 +75,19 @@ RoutePanel::RoutePanel(QWidget * parent) : rviz_common::Panel(parent) waypoints_group_ = group; } + // adapi group + { + const auto group = new QGroupBox("adapi"); + const auto local = new QGridLayout(); + local->addWidget(adapi_clear_, 0, 0); + local->addWidget(adapi_set_, 0, 1); + local->addWidget(adapi_change_, 0, 2); + local->addWidget(adapi_auto_clear_, 1, 0); + local->addWidget(adapi_response_, 1, 1, 1, 2); + group->setLayout(local); + layout->addWidget(group); + } + // options group { const auto group = new QGroupBox("options"); @@ -73,25 +109,14 @@ void RoutePanel::onInitialize() const auto adaptor = component_interface_utils::NodeAdaptor(node.get()); adaptor.init_cli(cli_clear_); - adaptor.init_cli(cli_route_); -} - -void RoutePanel::setRoute(const PoseStamped & pose) -{ - const auto req = std::make_shared(); - cli_clear_->async_send_request(req, [this, pose](auto) { - const auto req = std::make_shared(); - req->header = pose.header; - req->goal = pose.pose; - req->option.allow_goal_modification = allow_goal_modification_->isChecked(); - cli_route_->async_send_request(req); - }); + adaptor.init_cli(cli_set_); + adaptor.init_cli(cli_change_); } void RoutePanel::onPose(const PoseStamped::ConstSharedPtr msg) { if (!waypoints_mode_->isChecked()) { - setRoute(*msg); + requestRoute(*msg); } else { waypoints_.push_back(*msg); waypoints_group_->setTitle(QString("waypoints (count: %1)").arg(waypoints_.size())); @@ -120,8 +145,7 @@ void RoutePanel::onWaypointsApply() { if (waypoints_.empty()) return; - const auto req = std::make_shared(); - cli_clear_->async_send_request(req, [this](auto) { + const auto call = [this] { const auto req = std::make_shared(); req->header = 
waypoints_.back().header; req->goal = waypoints_.back().pose; @@ -129,9 +153,66 @@ void RoutePanel::onWaypointsApply() req->waypoints.push_back(waypoints_[i].pose); } req->option.allow_goal_modification = allow_goal_modification_->isChecked(); - cli_route_->async_send_request(req); + asyncSendRequest(req); onWaypointsReset(); - }); + }; + + if (adapi_mode_ == AdapiMode::Set && adapi_auto_clear_->isChecked()) { + const auto req = std::make_shared(); + cli_clear_->async_send_request(req, [call](auto) { call(); }); + } else { + call(); + } +} + +void RoutePanel::requestRoute(const PoseStamped & pose) +{ + const auto call = [this, pose] { + const auto req = std::make_shared(); + req->header = pose.header; + req->goal = pose.pose; + req->option.allow_goal_modification = allow_goal_modification_->isChecked(); + asyncSendRequest(req); + }; + + if (adapi_mode_ == AdapiMode::Set && adapi_auto_clear_->isChecked()) { + const auto req = std::make_shared(); + cli_clear_->async_send_request(req, [call](auto) { call(); }); + } else { + call(); + } +} + +void RoutePanel::clearRoute() +{ + const auto callback = [this](auto future) { + const auto status = future.get()->status; + std::string text = status.success ? "OK" : "Error"; + text += "[" + std::to_string(status.code) + "]"; + text += " " + status.message; + adapi_response_->setText(QString::fromStdString(text)); + }; + + const auto req = std::make_shared(); + cli_clear_->async_send_request(req, callback); +} + +void RoutePanel::asyncSendRequest(SetRoutePoints::Service::Request::SharedPtr req) +{ + const auto callback = [this](auto future) { + const auto status = future.get()->status; + std::string text = status.success ? 
"OK" : "Error"; + text += "[" + std::to_string(status.code) + "]"; + text += " " + status.message; + adapi_response_->setText(QString::fromStdString(text)); + }; + + if (adapi_mode_ == AdapiMode::Set) { + cli_set_->async_send_request(req, callback); + } + if (adapi_mode_ == AdapiMode::Change) { + cli_change_->async_send_request(req, callback); + } } } // namespace tier4_adapi_rviz_plugins diff --git a/common/tier4_adapi_rviz_plugin/src/route_panel.hpp b/common/tier4_adapi_rviz_plugin/src/route_panel.hpp index f7c953d65dd2d..47855d243a564 100644 --- a/common/tier4_adapi_rviz_plugin/src/route_panel.hpp +++ b/common/tier4_adapi_rviz_plugin/src/route_panel.hpp @@ -15,8 +15,10 @@ #ifndef ROUTE_PANEL_HPP_ #define ROUTE_PANEL_HPP_ +#include #include #include +#include #include #include #include @@ -35,6 +37,7 @@ class RoutePanel : public rviz_common::Panel Q_OBJECT using ClearRoute = autoware_ad_api::routing::ClearRoute; using SetRoutePoints = autoware_ad_api::routing::SetRoutePoints; + using ChangeRoutePoints = autoware_ad_api::routing::ChangeRoutePoints; using PoseStamped = geometry_msgs::msg::PoseStamped; public: @@ -45,6 +48,11 @@ class RoutePanel : public rviz_common::Panel QPushButton * waypoints_mode_; QPushButton * waypoints_reset_; QPushButton * waypoints_apply_; + QPushButton * adapi_clear_; + QPushButton * adapi_set_; + QPushButton * adapi_change_; + QLabel * adapi_response_; + QCheckBox * adapi_auto_clear_; QGroupBox * waypoints_group_; QCheckBox * allow_goal_modification_; @@ -52,11 +60,17 @@ class RoutePanel : public rviz_common::Panel std::vector waypoints_; void onPose(const PoseStamped::ConstSharedPtr msg); + enum AdapiMode { Set, Change }; + AdapiMode adapi_mode_; + component_interface_utils::Client::SharedPtr cli_clear_; - component_interface_utils::Client::SharedPtr cli_route_; - void setRoute(const PoseStamped & pose); + component_interface_utils::Client::SharedPtr cli_set_; + component_interface_utils::Client::SharedPtr cli_change_; + void 
requestRoute(const PoseStamped & pose); + void asyncSendRequest(SetRoutePoints::Service::Request::SharedPtr req); private slots: + void clearRoute(); void onWaypointsMode(bool clicked); void onWaypointsReset(); void onWaypointsApply(); diff --git a/common/tier4_automatic_goal_rviz_plugin/README.md b/common/tier4_automatic_goal_rviz_plugin/README.md index 48fb577375714..6fd626d5a7642 100644 --- a/common/tier4_automatic_goal_rviz_plugin/README.md +++ b/common/tier4_automatic_goal_rviz_plugin/README.md @@ -78,3 +78,21 @@ In this situation, check the terminal output for more information. - Often it is enough to try again. - Sometimes a clearing of the current route is required before retrying. + +## Material Design Icons + +This project uses [Material Design Icons](https://developers.google.com/fonts/docs/material_symbols) by Google. These icons are used under the terms of the Apache License, Version 2.0. + +Material Design Icons are a collection of symbols provided by Google that are used to enhance the user interface of applications, websites, and other digital products. + +### License + +The Material Design Icons are licensed under the Apache License, Version 2.0. You may obtain a copy of the License at: + + + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +### Acknowledgments + +We would like to express our gratitude to Google for making these icons available to the community, helping developers and designers enhance the visual appeal and user experience of their projects. 
diff --git a/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png index 6a67573717ae1..4f5b4961f2500 100644 Binary files a/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png and b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png differ diff --git a/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticGoalTool.png b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticGoalTool.png index 6a67573717ae1..58d12f95ebfd6 100644 Binary files a/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticGoalTool.png and b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticGoalTool.png differ diff --git a/common/tier4_camera_view_rviz_plugin/README.md b/common/tier4_camera_view_rviz_plugin/README.md index 99480d5a92e1c..3297b578dd609 100644 --- a/common/tier4_camera_view_rviz_plugin/README.md +++ b/common/tier4_camera_view_rviz_plugin/README.md @@ -7,3 +7,21 @@ Add the `tier4_camera_view_rviz_plugin/ThirdPersonViewTool` tool to the RViz. Pu ## BirdEyeView Tool Add the `tier4_camera_view_rviz_plugin/BirdEyeViewTool` tool to the RViz. Push the button, the camera will turn to the BEV view, the target frame is consistent with the latest frame. Short cut key 'r'. + +## Material Design Icons + +This project uses [Material Design Icons](https://developers.google.com/fonts/docs/material_symbols) by Google. These icons are used under the terms of the Apache License, Version 2.0. + +Material Design Icons are a collection of symbols provided by Google that are used to enhance the user interface of applications, websites, and other digital products. + +### License + +The Material Design Icons are licensed under the Apache License, Version 2.0. 
You may obtain a copy of the License at: + + + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +### Acknowledgments + +We would like to express our gratitude to Google for making these icons available to the community, helping developers and designers enhance the visual appeal and user experience of their projects. diff --git a/common/tier4_camera_view_rviz_plugin/icons/classes/BirdEyeViewTool.png b/common/tier4_camera_view_rviz_plugin/icons/classes/BirdEyeViewTool.png index 6a67573717ae1..ae5a41b241e7b 100644 Binary files a/common/tier4_camera_view_rviz_plugin/icons/classes/BirdEyeViewTool.png and b/common/tier4_camera_view_rviz_plugin/icons/classes/BirdEyeViewTool.png differ diff --git a/common/tier4_camera_view_rviz_plugin/icons/classes/ThirdPersonViewTool.png b/common/tier4_camera_view_rviz_plugin/icons/classes/ThirdPersonViewTool.png index 6a67573717ae1..93cb3f411bc35 100644 Binary files a/common/tier4_camera_view_rviz_plugin/icons/classes/ThirdPersonViewTool.png and b/common/tier4_camera_view_rviz_plugin/icons/classes/ThirdPersonViewTool.png differ diff --git a/common/tier4_debug_tools/CMakeLists.txt b/common/tier4_debug_tools/CMakeLists.txt deleted file mode 100644 index 609b873725429..0000000000000 --- a/common/tier4_debug_tools/CMakeLists.txt +++ /dev/null @@ -1,33 +0,0 @@ -cmake_minimum_required(VERSION 3.14) -project(tier4_debug_tools) - -find_package(autoware_cmake REQUIRED) -autoware_package() - -find_package(Eigen3 REQUIRED) - -include_directories( - SYSTEM - ${EIGEN3_INCLUDE_DIR} -) - -ament_auto_add_library(lateral_error_publisher SHARED - src/lateral_error_publisher.cpp -) - -rclcpp_components_register_node(lateral_error_publisher - PLUGIN "LateralErrorPublisher" - EXECUTABLE 
lateral_error_publisher_node -) - -ament_auto_package( - INSTALL_TO_SHARE - config - launch -) - -install(PROGRAMS scripts/stop_reason2pose.py scripts/pose2tf.py scripts/tf2pose.py - scripts/case_converter.py scripts/self_pose_listener.py - scripts/stop_reason2tf DESTINATION lib/${PROJECT_NAME}) - -install(FILES DESTINATION share/${PROJECT_NAME}) diff --git a/common/tier4_debug_tools/README.md b/common/tier4_debug_tools/README.md deleted file mode 100644 index 606172d1d57f3..0000000000000 --- a/common/tier4_debug_tools/README.md +++ /dev/null @@ -1,132 +0,0 @@ -# tier4_debug_tools - -This package provides useful features for debugging Autoware. - -## Usage - -### tf2pose - -This tool converts any `tf` to `pose` topic. -With this tool, for example, you can plot `x` values of `tf` in `rqt_multiplot`. - -```sh -ros2 run tier4_debug_tools tf2pose {tf_from} {tf_to} {hz} -``` - -Example: - -```sh -$ ros2 run tier4_debug_tools tf2pose base_link ndt_base_link 100 - -$ ros2 topic echo /tf2pose/pose -n1 -header: - seq: 13 - stamp: - secs: 1605168366 - nsecs: 549174070 - frame_id: "base_link" -pose: - position: - x: 0.0387684271191 - y: -0.00320360406477 - z: 0.000276674520819 - orientation: - x: 0.000335221893885 - y: 0.000122020672186 - z: -0.00539673212896 - w: 0.999985368502 ---- -``` - -### pose2tf - -This tool converts any `pose` topic to `tf`. - -```sh -ros2 run tier4_debug_tools pose2tf {pose_topic_name} {tf_name} -``` - -Example: - -```sh -$ ros2 run tier4_debug_tools pose2tf /localization/pose_estimator/pose ndt_pose - -$ ros2 run tf tf_echo ndt_pose ndt_base_link 100 -At time 1605168365.449 -- Translation: [0.000, 0.000, 0.000] -- Rotation: in Quaternion [0.000, 0.000, 0.000, 1.000] - in RPY (radian) [0.000, -0.000, 0.000] - in RPY (degree) [0.000, -0.000, 0.000] -``` - -### stop_reason2pose - -This tool extracts `pose` from `stop_reasons`. 
-Topics without numbers such as `/stop_reason2pose/pose/detection_area` are the nearest stop_reasons, and topics with numbers are individual stop_reasons that are roughly matched with previous ones. - -```sh -ros2 run tier4_debug_tools stop_reason2pose {stop_reason_topic_name} -``` - -Example: - -```sh -$ ros2 run tier4_debug_tools stop_reason2pose /planning/scenario_planning/status/stop_reasons - -$ ros2 topic list | ag stop_reason2pose -/stop_reason2pose/pose/detection_area -/stop_reason2pose/pose/detection_area_1 -/stop_reason2pose/pose/obstacle_stop -/stop_reason2pose/pose/obstacle_stop_1 - -$ ros2 topic echo /stop_reason2pose/pose/detection_area -n1 -header: - seq: 1 - stamp: - secs: 1605168355 - nsecs: 821713 - frame_id: "map" -pose: - position: - x: 60608.8433457 - y: 43886.2410876 - z: 44.9078212441 - orientation: - x: 0.0 - y: 0.0 - z: -0.190261378408 - w: 0.981733470901 ---- -``` - -### stop_reason2tf - -This is an all-in-one script that uses `tf2pose`, `pose2tf`, and `stop_reason2pose`. -With this tool, you can view the relative position from base_link to the nearest stop_reason. - -```sh -ros2 run tier4_debug_tools stop_reason2tf {stop_reason_name} -``` - -Example: - -```sh -$ ros2 run tier4_debug_tools stop_reason2tf obstacle_stop -At time 1605168359.501 -- Translation: [0.291, -0.095, 0.266] -- Rotation: in Quaternion [0.007, 0.011, -0.005, 1.000] - in RPY (radian) [0.014, 0.023, -0.010] - in RPY (degree) [0.825, 1.305, -0.573] -``` - -### lateral_error_publisher - -This node calculate the control error and localization error in the trajectory normal direction as shown in the figure below. - -![lateral_error_publisher_overview](./media/lateral_error_publisher.svg) - -Set the reference trajectory, vehicle pose and ground truth pose in the launch file. 
- -```sh -ros2 launch tier4_debug_tools lateral_error_publisher.launch.xml -``` diff --git a/common/tier4_debug_tools/config/lateral_error_publisher.param.yaml b/common/tier4_debug_tools/config/lateral_error_publisher.param.yaml deleted file mode 100644 index ecde03de7c658..0000000000000 --- a/common/tier4_debug_tools/config/lateral_error_publisher.param.yaml +++ /dev/null @@ -1,3 +0,0 @@ -/**: - ros__parameters: - yaw_threshold_to_search_closest: 0.785398 # yaw threshold to search closest index [rad] diff --git a/common/tier4_debug_tools/include/tier4_debug_tools/lateral_error_publisher.hpp b/common/tier4_debug_tools/include/tier4_debug_tools/lateral_error_publisher.hpp deleted file mode 100644 index 962fee8a370a4..0000000000000 --- a/common/tier4_debug_tools/include/tier4_debug_tools/lateral_error_publisher.hpp +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2021 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef TIER4_DEBUG_TOOLS__LATERAL_ERROR_PUBLISHER_HPP_ -#define TIER4_DEBUG_TOOLS__LATERAL_ERROR_PUBLISHER_HPP_ - -#define EIGEN_MPL2_ONLY - -#include -#include -#include -#include - -#include -#include -#include - -class LateralErrorPublisher : public rclcpp::Node -{ -public: - explicit LateralErrorPublisher(const rclcpp::NodeOptions & node_options); - -private: - /* Parameters */ - double yaw_threshold_to_search_closest_; - - /* States */ - autoware_auto_planning_msgs::msg::Trajectory::SharedPtr - current_trajectory_ptr_; //!< @brief reference trajectory - geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr - current_vehicle_pose_ptr_; //!< @brief current EKF pose - geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr - current_ground_truth_pose_ptr_; //!< @brief current GNSS pose - - /* Publishers and Subscribers */ - rclcpp::Subscription::SharedPtr - sub_trajectory_; //!< @brief subscription for reference trajectory - rclcpp::Subscription::SharedPtr - sub_vehicle_pose_; //!< @brief subscription for vehicle pose - rclcpp::Subscription::SharedPtr - sub_ground_truth_pose_; //!< @brief subscription for gnss pose - rclcpp::Publisher::SharedPtr - pub_control_lateral_error_; //!< @brief publisher for control lateral error - rclcpp::Publisher::SharedPtr - pub_localization_lateral_error_; //!< @brief publisher for localization lateral error - rclcpp::Publisher::SharedPtr - pub_lateral_error_; //!< @brief publisher for lateral error (control + localization) - - /** - * @brief set current_trajectory_ with received message - */ - void onTrajectory(const autoware_auto_planning_msgs::msg::Trajectory::SharedPtr); - /** - * @brief set current_vehicle_pose_ with received message - */ - void onVehiclePose(const geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr msg); - /** - * @brief set current_ground_truth_pose_ and calculate lateral error - */ - void onGroundTruthPose(const geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr msg); -}; - -#endif // 
TIER4_DEBUG_TOOLS__LATERAL_ERROR_PUBLISHER_HPP_ diff --git a/common/tier4_debug_tools/launch/lateral_error_publisher.launch.xml b/common/tier4_debug_tools/launch/lateral_error_publisher.launch.xml deleted file mode 100644 index cd1188326fb86..0000000000000 --- a/common/tier4_debug_tools/launch/lateral_error_publisher.launch.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/common/tier4_debug_tools/media/lateral_error_publisher.svg b/common/tier4_debug_tools/media/lateral_error_publisher.svg deleted file mode 100644 index 9d44bc2621bcd..0000000000000 --- a/common/tier4_debug_tools/media/lateral_error_publisher.svg +++ /dev/null @@ -1,183 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
-
- â‘  -
-
-
-
- â‘  -
-
- - - - - - - - - -
-
-
- Closest trajectory point -
-
-
-
- Closest trajectory poi... -
-
- - - -
-
-
- Next trajectory point -
-
-
-
- Next trajectory point -
-
- - - -
-
-
- Trajectory -
-
-
-
- Trajectory -
-
- - - - -
-
-
- Ground truth pose -
-
-
-
- Ground truth pose -
-
- - - - -
-
-
- Vehicle pose -
-
-
-
- Vehicle pose -
-
- - - -
-
-
- â‘¡ -
-
-
-
- â‘¡ -
-
- - - - -
-
-
- â‘  Control lateral error -
- â‘¡ Localization lateral error -
- â‘ +â‘¡ Lateral error -
-
-
-
- â‘  Control lateral error... -
-
-
- - - - Viewer does not support full SVG 1.1 - - -
diff --git a/common/tier4_debug_tools/package.xml b/common/tier4_debug_tools/package.xml deleted file mode 100644 index 8f03a3b55cf26..0000000000000 --- a/common/tier4_debug_tools/package.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - tier4_debug_tools - 0.1.0 - The tier4_debug_tools package - Ryohsuke Mitsudome - Apache License 2.0 - - ament_cmake_auto - autoware_cmake - - autoware_auto_planning_msgs - geometry_msgs - motion_utils - rclcpp - rclcpp_components - tf2_ros - tier4_autoware_utils - tier4_debug_msgs - - launch_ros - python3-rtree - rclpy - ament_lint_auto - autoware_lint_common - - - ament_cmake - - diff --git a/common/tier4_debug_tools/scripts/case_converter.py b/common/tier4_debug_tools/scripts/case_converter.py deleted file mode 100755 index 2fcc3eebac62e..0000000000000 --- a/common/tier4_debug_tools/scripts/case_converter.py +++ /dev/null @@ -1,25 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright 2020 Tier IV, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re - - -def pascal2snake(s): - return re.sub(r"([a-z])([A-Z])", r"\1_\2", s).lower() - - -def snake2pascal(s): - return "".join([w.capitalize() for w in s.split("_")]) diff --git a/common/tier4_debug_tools/scripts/pose2tf.py b/common/tier4_debug_tools/scripts/pose2tf.py deleted file mode 100755 index c954e370d4069..0000000000000 --- a/common/tier4_debug_tools/scripts/pose2tf.py +++ /dev/null @@ -1,78 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright 2020 Tier IV, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import argparse -import sys - -from geometry_msgs.msg import PoseStamped -from geometry_msgs.msg import TransformStamped -import rclpy -from rclpy.node import Node -import tf2_ros - - -class Pose2TfNode(Node): - def __init__(self, options): - super().__init__("pose2tf") - self._options = options - self._tf_broadcaster = tf2_ros.TransformBroadcaster(self) - self._sub_pose = self.create_subscription( - PoseStamped, self._options.topic_name, self._on_pose, 1 - ) - - def _on_pose(self, msg): - try: - tfs = Pose2TfNode.create_transform(self, msg) - transforms = [] - transforms.append(tfs) - self._tf_broadcaster.sendTransform(transforms) - except tf2_ros.TransformException as e: - print(e) - - @staticmethod - def create_transform(self, msg): - tfs = TransformStamped() - tfs.header.frame_id = msg.header.frame_id - tfs.header.stamp = msg.header.stamp - tfs.child_frame_id = self._options.tf_name - tfs.transform.translation.x = msg.pose.position.x - tfs.transform.translation.y = msg.pose.position.y - tfs.transform.translation.z = msg.pose.position.z - tfs.transform.rotation.x = msg.pose.orientation.x - tfs.transform.rotation.y = msg.pose.orientation.y - tfs.transform.rotation.z = msg.pose.orientation.z - tfs.transform.rotation.w = msg.pose.orientation.w - return tfs - - -def main(args): - print("{}".format(args)) - rclpy.init() - - parser = argparse.ArgumentParser() - parser.add_argument("topic_name", type=str) - 
parser.add_argument("tf_name", type=str) - ns = parser.parse_args(args) - - pose2tf_node = Pose2TfNode(ns) - rclpy.spin(pose2tf_node) - pose2tf_node.destroy_node() - rclpy.shutdown() - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/common/tier4_debug_tools/scripts/self_pose_listener.py b/common/tier4_debug_tools/scripts/self_pose_listener.py deleted file mode 100755 index 0087bfbbbdf1e..0000000000000 --- a/common/tier4_debug_tools/scripts/self_pose_listener.py +++ /dev/null @@ -1,56 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright 2020 Tier IV, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from geometry_msgs.msg import PoseStamped -import rclpy -from rclpy.node import Node -from tf2_ros import LookupException -from tf2_ros.buffer import Buffer -from tf2_ros.transform_listener import TransformListener - - -class SelfPoseListener(Node): - def __init__(self): - super().__init__("self_pose_listener") - self.tf_buffer = Buffer() - self._tf_listener = TransformListener(self.tf_buffer, self) - self.self_pose = PoseStamped() - - def get_current_pose(self): - try: - tf = self.tf_buffer.lookup_transform("map", "base_link", rclpy.time.Time()) - tf_time = self.tf_buffer.get_latest_common_time("map", "base_link") - self.self_pose = SelfPoseListener.create_pose(tf_time, "map", tf) - except LookupException as e: - self.get_logger().warn("Required transformation not found: `{}`".format(str(e))) - return None - - @staticmethod - def create_pose(time, frame_id, tf): - pose = PoseStamped() - - pose.header.stamp = time.to_msg() - pose.header.frame_id = frame_id - - pose.pose.position.x = tf.transform.translation.x - pose.pose.position.y = tf.transform.translation.y - pose.pose.position.z = tf.transform.translation.z - pose.pose.orientation.x = tf.transform.rotation.x - pose.pose.orientation.y = tf.transform.rotation.y - pose.pose.orientation.z = tf.transform.rotation.z - pose.pose.orientation.w = tf.transform.rotation.w - - return pose diff --git a/common/tier4_debug_tools/scripts/stop_reason2pose.py b/common/tier4_debug_tools/scripts/stop_reason2pose.py deleted file mode 100755 index 433f4e75042f8..0000000000000 --- a/common/tier4_debug_tools/scripts/stop_reason2pose.py +++ /dev/null @@ -1,167 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright 2020 Tier IV, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import math -import sys - -from case_converter import pascal2snake -from geometry_msgs.msg import PoseStamped -import numpy as np -import rclpy -from rclpy.node import Node -from rtree import index -from self_pose_listener import SelfPoseListener -from tier4_planning_msgs.msg import StopReasonArray - - -class StopReason2PoseNode(Node): - def __init__(self, options): - super().__init__("stop_reason2pose_node") - self._options = options - self._sub_pose = self.create_subscription( - StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 - ) - self._pub_pose_map = {} - self._idx_map = {} - self._pose_map = {} - self._self_pose_listener = SelfPoseListener() - self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) - - def _on_stop_reasons(self, msg): - for stop_reason in msg.stop_reasons: - snake_case_stop_reason = pascal2snake(stop_reason.reason) - - if len(stop_reason.stop_factors) == 0: - self.get_logger().warn("stop_factor is null") - return - - for stop_factor in stop_reason.stop_factors: - pose = PoseStamped() - pose.header = msg.header - pose.pose = stop_factor.stop_pose - - # Get nearest pose - th_dist = 1.0 - nearest_pose_id = self._get_nearest_pose_id( - snake_case_stop_reason, pose.pose, th_dist - ) - if nearest_pose_id: - self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) - pose_id = nearest_pose_id - else: - pose_id = self._register_pose(snake_case_stop_reason, pose.pose) - - pose_topic_name = "{snake_case_stop_reason}_{pose_id}".format(**locals()) - topic_ns = 
"/tier4_debug_tools/stop_reason2pose/" - if pose_topic_name not in self._pub_pose_map: - self._pub_pose_map[pose_topic_name] = self.create_publisher( - PoseStamped, topic_ns + pose_topic_name, 1 - ) - self._pub_pose_map[pose_topic_name].publish(pose) - - # Publish nearest stop_reason without number - nearest_pose = PoseStamped() - nearest_pose.header = msg.header - nearest_pose.pose = self._get_nearest_pose_in_array( - stop_reason, self._self_pose_listener.self_pose - ) - - if nearest_pose.pose: - if snake_case_stop_reason not in self._pub_pose_map: - topic_ns = "/tier4_debug_tools/stop_reason2pose/" - self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( - PoseStamped, topic_ns + snake_case_stop_reason, 1 - ) - self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) - - def _get_nearest_pose_in_array(self, stop_reason, self_pose): - poses = [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] - if not poses: - return None - - distances = [StopReason2PoseNode.calc_distance2d(p, self_pose.pose) for p in poses] - nearest_idx = np.argmin(distances) - - return poses[nearest_idx] - - def _find_nearest_pose_id(self, name, pose): - if name not in self._idx_map: - self._idx_map[name] = index.Index() - - return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) - - def _get_nearest_pose_id(self, name, pose, th_dist): - nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) - if not nearest_pose_ids: - return None - - nearest_pose_id = nearest_pose_ids[0] - nearest_pose = self._get_pose(name, nearest_pose_id) - if not nearest_pose: - return None - - dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) - if dist > th_dist: - return None - - return nearest_pose_id - - def _get_pose(self, name, pose_id): - if name not in self._pose_map: - return None - - return self._pose_map[name][pose_id] - - def _update_pose(self, name, pose, pose_id): - self._pose_map[name][id] = pose - 
self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) - - def _register_pose(self, name, pose): - if name not in self._pose_map: - self._pose_map[name] = {} - - pose_id = len(self._pose_map[name]) + 1 - self._pose_map[name][pose_id] = pose - self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) - return pose_id - - @staticmethod - def calc_distance2d(pose1, pose2): - p1 = pose1.position - p2 = pose2.position - return math.hypot(p1.x - p2.x, p1.y - p2.y) - - @staticmethod - def pose2boundingbox(pose): - return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] - - -def main(args): - rclpy.init() - - parser = argparse.ArgumentParser() - parser.add_argument("topic_name", type=str) - ns = parser.parse_args(args) - - stop_reason2pose_node = StopReason2PoseNode(ns) - rclpy.spin(stop_reason2pose_node) - stop_reason2pose_node.destroy_node() - rclpy.shutdown() - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/common/tier4_debug_tools/scripts/stop_reason2tf b/common/tier4_debug_tools/scripts/stop_reason2tf deleted file mode 100755 index 60d5956130869..0000000000000 --- a/common/tier4_debug_tools/scripts/stop_reason2tf +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -stop_reason_name="$1" - -if [ -z "${stop_reason_name}" ]; then - echo "Please input stop_reason_name as the 1st argument." 
- exit 1 -fi - -ros2 run tier4_debug_tools stop_reason2pose.py /planning/scenario_planning/status/stop_reasons >/dev/null 2>&1 & -ros2 run tier4_debug_tools pose2tf.py /tier4_debug_tools/stop_reason2pose/"${stop_reason_name}" "${stop_reason_name}" >/dev/null 2>&1 & -ros2 run tier4_debug_tools tf2pose.py "${stop_reason_name}" base_link 100 >/dev/null 2>&1 & -ros2 run tf2_ros tf2_echo "${stop_reason_name}" base_link 100 2>/dev/null - -wait diff --git a/common/tier4_debug_tools/scripts/tf2pose.py b/common/tier4_debug_tools/scripts/tf2pose.py deleted file mode 100755 index a4422fdd8015b..0000000000000 --- a/common/tier4_debug_tools/scripts/tf2pose.py +++ /dev/null @@ -1,83 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright 2020 Tier IV, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import sys - -from geometry_msgs.msg import PoseStamped -import rclpy -from rclpy.node import Node -from tf2_ros import LookupException -from tf2_ros.buffer import Buffer -from tf2_ros.transform_listener import TransformListener - - -class Tf2PoseNode(Node): - def __init__(self, options): - super().__init__("tf2pose") - - self._options = options - self.tf_buffer = Buffer() - self.tf_listener = TransformListener(self.tf_buffer, self) - self._pub_pose = self.create_publisher(PoseStamped, "/tier4_debug_tools/tf2pose/pose", 1) - self.timer = self.create_timer((1.0 / self._options.hz), self._on_timer) - - def _on_timer(self): - try: - tf = self.tf_buffer.lookup_transform( - self._options.tf_from, self._options.tf_to, rclpy.time.Time() - ) - time = self.tf_buffer.get_latest_common_time(self._options.tf_from, self._options.tf_to) - pose = Tf2PoseNode.create_pose(time, self._options.tf_from, tf) - self._pub_pose.publish(pose) - except LookupException as e: - print(e) - - @staticmethod - def create_pose(time, frame_id, tf): - pose = PoseStamped() - - pose.header.stamp = time.to_msg() - pose.header.frame_id = frame_id - - pose.pose.position.x = tf.transform.translation.x - pose.pose.position.y = tf.transform.translation.y - pose.pose.position.z = tf.transform.translation.z - pose.pose.orientation.x = tf.transform.rotation.x - pose.pose.orientation.y = tf.transform.rotation.y - pose.pose.orientation.z = tf.transform.rotation.z - pose.pose.orientation.w = tf.transform.rotation.w - - return pose - - -def main(args): - rclpy.init() - - parser = argparse.ArgumentParser() - parser.add_argument("tf_from", type=str) - parser.add_argument("tf_to", type=str) - parser.add_argument("hz", type=int, default=10) - ns = parser.parse_args(args) - - tf2pose_node = Tf2PoseNode(ns) - rclpy.spin(tf2pose_node) - tf2pose_node.destroy_node() - rclpy.shutdown() - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git 
a/common/tier4_debug_tools/src/lateral_error_publisher.cpp b/common/tier4_debug_tools/src/lateral_error_publisher.cpp deleted file mode 100644 index 18c97a47aa55a..0000000000000 --- a/common/tier4_debug_tools/src/lateral_error_publisher.cpp +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2021 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "tier4_debug_tools/lateral_error_publisher.hpp" - -#include - -LateralErrorPublisher::LateralErrorPublisher(const rclcpp::NodeOptions & node_options) -: Node("lateral_error_publisher", node_options) -{ - using std::placeholders::_1; - - /* Parameters */ - yaw_threshold_to_search_closest_ = - declare_parameter("yaw_threshold_to_search_closest", M_PI / 4.0); - - /* Publishers and Subscribers */ - sub_trajectory_ = create_subscription( - "~/input/reference_trajectory", rclcpp::QoS{1}, - std::bind(&LateralErrorPublisher::onTrajectory, this, _1)); - sub_vehicle_pose_ = create_subscription( - "~/input/vehicle_pose_with_covariance", rclcpp::QoS{1}, - std::bind(&LateralErrorPublisher::onVehiclePose, this, _1)); - sub_ground_truth_pose_ = create_subscription( - "~/input/ground_truth_pose_with_covariance", rclcpp::QoS{1}, - std::bind(&LateralErrorPublisher::onGroundTruthPose, this, _1)); - pub_control_lateral_error_ = - create_publisher("~/control_lateral_error", 1); - pub_localization_lateral_error_ = - create_publisher("~/localization_lateral_error", 1); - pub_lateral_error_ = - 
create_publisher("~/lateral_error", 1); -} - -void LateralErrorPublisher::onTrajectory( - const autoware_auto_planning_msgs::msg::Trajectory::SharedPtr msg) -{ - current_trajectory_ptr_ = msg; -} - -void LateralErrorPublisher::onVehiclePose( - const geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr msg) -{ - current_vehicle_pose_ptr_ = msg; -} - -void LateralErrorPublisher::onGroundTruthPose( - const geometry_msgs::msg::PoseWithCovarianceStamped::SharedPtr msg) -{ - current_ground_truth_pose_ptr_ = msg; - - // Guard - if (current_trajectory_ptr_ == nullptr || current_vehicle_pose_ptr_ == nullptr) { - RCLCPP_WARN_THROTTLE( - this->get_logger(), *this->get_clock(), 1000 /* ms */, - "Reference trajectory or EKF pose is not received"); - return; - } - if (current_trajectory_ptr_->points.size() < 2) { - RCLCPP_WARN_THROTTLE( - this->get_logger(), *this->get_clock(), 1000 /* ms */, "Reference trajectory is too short"); - return; - } - - // Search closest trajectory point with vehicle pose - const auto closest_index = motion_utils::findNearestIndex( - current_trajectory_ptr_->points, current_vehicle_pose_ptr_->pose.pose, - std::numeric_limits::max(), yaw_threshold_to_search_closest_); - if (!closest_index) { - RCLCPP_WARN_THROTTLE( - this->get_logger(), *this->get_clock(), 1000 /* ms */, "Failed to search closest index"); - return; - } - - // Calculate the normal vector in the reference trajectory direction - size_t base_index = 0; - size_t next_index = 0; - if (*closest_index == current_trajectory_ptr_->points.size() - 1) { - base_index = *closest_index - 1; - next_index = *closest_index; - } else { - base_index = *closest_index; - next_index = *closest_index + 1; - } - - const auto & base_pose = current_trajectory_ptr_->points.at(base_index).pose; - const auto & next_pose = current_trajectory_ptr_->points.at(next_index).pose; - const double dx = next_pose.position.x - base_pose.position.x; - const double dy = next_pose.position.y - base_pose.position.y; - const 
Eigen::Vector2d trajectory_direction(dx, dy); - RCLCPP_DEBUG(this->get_logger(), "trajectory direction: (%f, %f)", dx, dy); - - const auto rotation = Eigen::Rotation2Dd(M_PI_2); - const Eigen::Vector2d normal_direction = rotation * trajectory_direction; - RCLCPP_DEBUG( - this->get_logger(), "normal direction: (%f, %f)", normal_direction(0), normal_direction(1)); - const Eigen::Vector2d unit_normal_direction = normal_direction.normalized(); - RCLCPP_DEBUG( - this->get_logger(), "unit normal direction: (%f, %f)", unit_normal_direction(0), - unit_normal_direction(1)); - - // Calculate control lateral error - const auto & closest_pose = current_trajectory_ptr_->points.at(*closest_index).pose; - const auto & vehicle_pose = current_vehicle_pose_ptr_->pose.pose; - const Eigen::Vector2d closest_to_vehicle( - vehicle_pose.position.x - closest_pose.position.x, - vehicle_pose.position.y - closest_pose.position.y); - const auto control_lateral_error = closest_to_vehicle.dot(unit_normal_direction); - RCLCPP_DEBUG(this->get_logger(), "control_lateral_error: %f", control_lateral_error); - - // Calculate localization lateral error - const auto ground_truth_pose = current_ground_truth_pose_ptr_->pose.pose; - const Eigen::Vector2d vehicle_to_ground_truth( - ground_truth_pose.position.x - vehicle_pose.position.x, - ground_truth_pose.position.y - vehicle_pose.position.y); - const auto localization_lateral_error = vehicle_to_ground_truth.dot(unit_normal_direction); - RCLCPP_DEBUG(this->get_logger(), "localization_lateral_error: %f", localization_lateral_error); - - const auto lateral_error = control_lateral_error + localization_lateral_error; - RCLCPP_DEBUG(this->get_logger(), "localization_error: %f", lateral_error); - - // Publish lateral errors - tier4_debug_msgs::msg::Float32Stamped control_msg; - control_msg.stamp = this->now(); - control_msg.data = static_cast(control_lateral_error); - pub_control_lateral_error_->publish(control_msg); - - tier4_debug_msgs::msg::Float32Stamped 
localization_msg; - localization_msg.stamp = this->now(); - localization_msg.data = static_cast(localization_lateral_error); - pub_localization_lateral_error_->publish(localization_msg); - - tier4_debug_msgs::msg::Float32Stamped sum_msg; - sum_msg.stamp = this->now(); - sum_msg.data = static_cast(lateral_error); - pub_lateral_error_->publish(sum_msg); -} - -#include -RCLCPP_COMPONENTS_REGISTER_NODE(LateralErrorPublisher) diff --git a/common/tier4_perception_rviz_plugin/README.md b/common/tier4_perception_rviz_plugin/README.md index 7ce9ba5114ef9..6ec6a1a0e28a1 100644 --- a/common/tier4_perception_rviz_plugin/README.md +++ b/common/tier4_perception_rviz_plugin/README.md @@ -112,3 +112,21 @@ You can interactively manipulate the object. | ADD | Shift + Click Right Button | | MOVE | Hold down Right Button + Drug and Drop | | DELETE | Alt + Click Right Button | + +## Material Design Icons + +This project uses [Material Design Icons](https://developers.google.com/fonts/docs/material_symbols) by Google. These icons are used under the terms of the Apache License, Version 2.0. + +Material Design Icons are a collection of symbols provided by Google that are used to enhance the user interface of applications, websites, and other digital products. + +### License + +The Material Design Icons are licensed under the Apache License, Version 2.0. You may obtain a copy of the License at: + + + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +### Acknowledgments + +We would like to express our gratitude to Google for making these icons available to the community, helping developers and designers enhance the visual appeal and user experience of their projects. 
diff --git a/common/tier4_perception_rviz_plugin/icons/classes/BikeInitialPoseTool.png b/common/tier4_perception_rviz_plugin/icons/classes/BikeInitialPoseTool.png index 6a67573717ae1..b36ea7c84774b 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/BikeInitialPoseTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/BikeInitialPoseTool.png differ diff --git a/common/tier4_perception_rviz_plugin/icons/classes/BusInitialPoseTool.png b/common/tier4_perception_rviz_plugin/icons/classes/BusInitialPoseTool.png index 6a67573717ae1..d6ef8fb25c063 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/BusInitialPoseTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/BusInitialPoseTool.png differ diff --git a/common/tier4_perception_rviz_plugin/icons/classes/CarInitialPoseTool.png b/common/tier4_perception_rviz_plugin/icons/classes/CarInitialPoseTool.png index 6a67573717ae1..2959a2c639c3e 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/CarInitialPoseTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/CarInitialPoseTool.png differ diff --git a/common/tier4_perception_rviz_plugin/icons/classes/DeleteAllObjectsTool.png b/common/tier4_perception_rviz_plugin/icons/classes/DeleteAllObjectsTool.png index 6a67573717ae1..b1991030c4bb0 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/DeleteAllObjectsTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/DeleteAllObjectsTool.png differ diff --git a/common/tier4_perception_rviz_plugin/icons/classes/PedestrianInitialPoseTool.png b/common/tier4_perception_rviz_plugin/icons/classes/PedestrianInitialPoseTool.png index 6a67573717ae1..2ae4aa47e3c74 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/PedestrianInitialPoseTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/PedestrianInitialPoseTool.png differ diff --git 
a/common/tier4_perception_rviz_plugin/icons/classes/UnknownInitialPoseTool.png b/common/tier4_perception_rviz_plugin/icons/classes/UnknownInitialPoseTool.png index 6a67573717ae1..138457d72e0ac 100644 Binary files a/common/tier4_perception_rviz_plugin/icons/classes/UnknownInitialPoseTool.png and b/common/tier4_perception_rviz_plugin/icons/classes/UnknownInitialPoseTool.png differ diff --git a/common/tier4_planning_rviz_plugin/README.md b/common/tier4_planning_rviz_plugin/README.md index 8b9154422eae6..ef9df25f2b657 100644 --- a/common/tier4_planning_rviz_plugin/README.md +++ b/common/tier4_planning_rviz_plugin/README.md @@ -125,3 +125,21 @@ This plugin displays the path, trajectory, and maximum speed. ![select_planning_plugin](./images/select_planning_plugin.png) 3. Enter the name of the topic where you want to view the path or trajectory. ![select_topic_name](./images/select_topic_name.png) + +## Material Design Icons + +This project uses [Material Design Icons](https://developers.google.com/fonts/docs/material_symbols) by Google. These icons are used under the terms of the Apache License, Version 2.0. + +Material Design Icons are a collection of symbols provided by Google that are used to enhance the user interface of applications, websites, and other digital products. + +### License + +The Material Design Icons are licensed under the Apache License, Version 2.0. You may obtain a copy of the License at: + + + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +### Acknowledgments + +We would like to express our gratitude to Google for making these icons available to the community, helping developers and designers enhance the visual appeal and user experience of their projects. 
diff --git a/common/tier4_planning_rviz_plugin/icons/classes/MissionCheckpointTool.png b/common/tier4_planning_rviz_plugin/icons/classes/MissionCheckpointTool.png index 6a67573717ae1..4f5b4961f2500 100644 Binary files a/common/tier4_planning_rviz_plugin/icons/classes/MissionCheckpointTool.png and b/common/tier4_planning_rviz_plugin/icons/classes/MissionCheckpointTool.png differ diff --git a/common/tier4_target_object_type_rviz_plugin/src/target_object_type_panel.cpp b/common/tier4_target_object_type_rviz_plugin/src/target_object_type_panel.cpp index cf5960cac281f..e014307942bab 100644 --- a/common/tier4_target_object_type_rviz_plugin/src/target_object_type_panel.cpp +++ b/common/tier4_target_object_type_rviz_plugin/src/target_object_type_panel.cpp @@ -60,7 +60,12 @@ void TargetObjectTypePanel::setParameters() modules_ = { "avoidance", "avoidance_by_lane_change", + "dynamic_avoidance", "lane_change", + "start_planner", + "goal_planner", + "crosswalk", + "surround_obstacle_checker", "obstacle_cruise (inside)", "obstacle_cruise (outside)", "obstacle_stop", @@ -89,15 +94,8 @@ void TargetObjectTypePanel::setParameters() ParamNameEnableObject param_name; param_name.node = "/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner"; - param_name.ns = "avoidance.target_object"; - param_name.name.emplace("car", "car.is_target"); - param_name.name.emplace("truck", "truck.is_target"); - param_name.name.emplace("bus", "bus.is_target"); - param_name.name.emplace("trailer", "trailer.is_target"); - param_name.name.emplace("unknown", "unknown.is_target"); - param_name.name.emplace("bicycle", "bicycle.is_target"); - param_name.name.emplace("motorcycle", "motorcycle.is_target"); - param_name.name.emplace("pedestrian", "pedestrian.is_target"); + param_name.ns = "avoidance.target_filtering.target_type"; + param_name.name = default_param_name.name; param_names_.emplace(module, param_name); } @@ -107,15 +105,8 @@ void TargetObjectTypePanel::setParameters() 
ParamNameEnableObject param_name; param_name.node = "/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner"; - param_name.ns = "avoidance_by_lane_change.target_object"; - param_name.name.emplace("car", "car.is_target"); - param_name.name.emplace("truck", "truck.is_target"); - param_name.name.emplace("bus", "bus.is_target"); - param_name.name.emplace("trailer", "trailer.is_target"); - param_name.name.emplace("unknown", "unknown.is_target"); - param_name.name.emplace("bicycle", "bicycle.is_target"); - param_name.name.emplace("motorcycle", "motorcycle.is_target"); - param_name.name.emplace("pedestrian", "pedestrian.is_target"); + param_name.ns = "avoidance_by_lane_change.target_filtering.target_type"; + param_name.name = default_param_name.name; param_names_.emplace(module, param_name); } @@ -130,6 +121,64 @@ void TargetObjectTypePanel::setParameters() param_names_.emplace(module, param_name); } + // start_planner + { + const auto module = "start_planner"; + ParamNameEnableObject param_name; + param_name.node = + "/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner"; + param_name.ns = "start_planner.path_safety_check.target_filtering.object_types_to_check"; + param_name.name.emplace("car", "check_car"); + param_name.name.emplace("truck", "check_truck"); + param_name.name.emplace("bus", "check_bus"); + param_name.name.emplace("trailer", "check_trailer"); + param_name.name.emplace("unknown", "check_unknown"); + param_name.name.emplace("bicycle", "check_bicycle"); + param_name.name.emplace("motorcycle", "check_motorcycle"); + param_name.name.emplace("pedestrian", "check_pedestrian"); + param_names_.emplace(module, param_name); + } + + // goal_planner + { + const auto module = "goal_planner"; + ParamNameEnableObject param_name; + param_name.node = + "/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner"; + param_name.ns = 
"goal_planner.path_safety_check.target_filtering.object_types_to_check"; + param_name.name.emplace("car", "check_car"); + param_name.name.emplace("truck", "check_truck"); + param_name.name.emplace("bus", "check_bus"); + param_name.name.emplace("trailer", "check_trailer"); + param_name.name.emplace("unknown", "check_unknown"); + param_name.name.emplace("bicycle", "check_bicycle"); + param_name.name.emplace("motorcycle", "check_motorcycle"); + param_name.name.emplace("pedestrian", "check_pedestrian"); + param_names_.emplace(module, param_name); + } + + // dynamic_avoidance + { + const auto module = "dynamic_avoidance"; + ParamNameEnableObject param_name; + param_name.node = + "/planning/scenario_planning/lane_driving/behavior_planning/behavior_path_planner"; + param_name.ns = "dynamic_avoidance.target_object"; + param_name.name = default_param_name.name; + param_names_.emplace(module, param_name); + } + + // crosswalk + { + const auto module = "crosswalk"; + ParamNameEnableObject param_name; + param_name.node = + "/planning/scenario_planning/lane_driving/behavior_planning/behavior_velocity_planner"; + param_name.ns = "crosswalk.object_filtering.target_object"; + param_name.name = default_param_name.name; + param_names_.emplace(module, param_name); + } + // obstacle cruise (inside) { const auto module = "obstacle_cruise (inside)"; @@ -152,6 +201,24 @@ void TargetObjectTypePanel::setParameters() param_names_.emplace(module, param_name); } + // surround_obstacle_check + { + const auto module = "surround_obstacle_checker"; + ParamNameEnableObject param_name; + param_name.node = + "/planning/scenario_planning/lane_driving/motion_planning/surround_obstacle_checker"; + param_name.ns = ""; + param_name.name.emplace("car", "car.enable_check"); + param_name.name.emplace("truck", "truck.enable_check"); + param_name.name.emplace("bus", "bus.enable_check"); + param_name.name.emplace("trailer", "trailer.enable_check"); + param_name.name.emplace("unknown", "unknown.enable_check"); 
+ param_name.name.emplace("bicycle", "bicycle.enable_check"); + param_name.name.emplace("motorcycle", "motorcycle.enable_check"); + param_name.name.emplace("pedestrian", "pedestrian.enable_check"); + param_names_.emplace(module, param_name); + } + // obstacle stop { const auto module = "obstacle_stop"; @@ -225,7 +292,8 @@ void TargetObjectTypePanel::updateMatrix() continue; } - std::string param_name = module_params.ns + "." + module_params.name.at(target); + std::string param_name = + (module_params.ns.empty() ? "" : module_params.ns + ".") + module_params.name.at(target); auto parameter_result = parameters_client->get_parameters({param_name}); if (!parameter_result.empty()) { diff --git a/common/traffic_light_utils/include/traffic_light_utils/traffic_light_utils.hpp b/common/traffic_light_utils/include/traffic_light_utils/traffic_light_utils.hpp index 6ce95acc4ef1d..9c3acd4f45fa4 100644 --- a/common/traffic_light_utils/include/traffic_light_utils/traffic_light_utils.hpp +++ b/common/traffic_light_utils/include/traffic_light_utils/traffic_light_utils.hpp @@ -17,9 +17,9 @@ #include "autoware_perception_msgs/msg/traffic_signal.hpp" #include "autoware_perception_msgs/msg/traffic_signal_element.hpp" +#include "tier4_perception_msgs/msg/traffic_light.hpp" #include "tier4_perception_msgs/msg/traffic_light_element.hpp" #include "tier4_perception_msgs/msg/traffic_light_roi.hpp" -#include "tier4_perception_msgs/msg/traffic_signal.hpp" #include #include @@ -35,9 +35,9 @@ bool isRoiValid( void setRoiInvalid(tier4_perception_msgs::msg::TrafficLightRoi & roi); -bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficSignal & signal); +bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficLight & signal); -void setSignalUnknown(tier4_perception_msgs::msg::TrafficSignal & signal, float confidence = -1); +void setSignalUnknown(tier4_perception_msgs::msg::TrafficLight & signal, float confidence = -1); /** * @brief Checks if a traffic light state includes a 
circle-shaped light with the specified color. diff --git a/common/traffic_light_utils/src/traffic_light_utils.cpp b/common/traffic_light_utils/src/traffic_light_utils.cpp index c8f2ca85b2089..0bd3d85a9b71f 100644 --- a/common/traffic_light_utils/src/traffic_light_utils.cpp +++ b/common/traffic_light_utils/src/traffic_light_utils.cpp @@ -33,14 +33,14 @@ void setRoiInvalid(tier4_perception_msgs::msg::TrafficLightRoi & roi) roi.roi.height = roi.roi.width = 0; } -bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficSignal & signal) +bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficLight & signal) { return signal.elements.size() == 1 && signal.elements[0].shape == tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN && signal.elements[0].color == tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; } -void setSignalUnknown(tier4_perception_msgs::msg::TrafficSignal & signal, float confidence) +void setSignalUnknown(tier4_perception_msgs::msg::TrafficLight & signal, float confidence) { signal.elements.resize(1); signal.elements[0].shape = tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; diff --git a/common/traffic_light_utils/test/test_traffic_light_utils.cpp b/common/traffic_light_utils/test/test_traffic_light_utils.cpp index bd777265b4195..a80510693c487 100644 --- a/common/traffic_light_utils/test/test_traffic_light_utils.cpp +++ b/common/traffic_light_utils/test/test_traffic_light_utils.cpp @@ -48,7 +48,7 @@ TEST(setRoiInvalid, set_roi_size) TEST(isSignalUnknown, signal_element) { - tier4_perception_msgs::msg::TrafficSignal test_signal; + tier4_perception_msgs::msg::TrafficLight test_signal; tier4_perception_msgs::msg::TrafficLightElement element; element.color = tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; element.shape = tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; @@ -60,7 +60,7 @@ TEST(isSignalUnknown, signal_element) TEST(setSignalUnknown, set_signal_element) { - 
tier4_perception_msgs::msg::TrafficSignal test_signal; + tier4_perception_msgs::msg::TrafficLight test_signal; tier4_perception_msgs::msg::TrafficLightElement element; element.color = tier4_perception_msgs::msg::TrafficLightElement::RED; element.shape = tier4_perception_msgs::msg::TrafficLightElement::CROSS; diff --git a/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp b/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp index 27ec775110109..fb1973d777138 100644 --- a/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp +++ b/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp @@ -41,6 +41,7 @@ #include #include +#include #include #include @@ -140,10 +141,9 @@ class AEB : public rclcpp::Node bool hasCollision( const double current_v, const Path & ego_path, const std::vector & objects); - void generateEgoPath( - const double curr_v, const double curr_w, Path & path, std::vector & polygons); - void generateEgoPath( - const Trajectory & predicted_traj, Path & path, std::vector & polygons); + Path generateEgoPath(const double curr_v, const double curr_w, std::vector & polygons); + std::optional generateEgoPath( + const Trajectory & predicted_traj, std::vector & polygons); void createObjectData( const Path & ego_path, const std::vector & ego_polys, const rclcpp::Time & stamp, std::vector & objects); diff --git a/control/autonomous_emergency_braking/package.xml b/control/autonomous_emergency_braking/package.xml index d45078064da3c..1ac255c21921b 100644 --- a/control/autonomous_emergency_braking/package.xml +++ b/control/autonomous_emergency_braking/package.xml @@ -6,6 +6,7 @@ Autonomous Emergency Braking package as a ROS 2 node Takamasa Horibe Tomoya Kimura + Mamoru Sobue Apache License 2.0 diff --git a/control/autonomous_emergency_braking/src/node.cpp b/control/autonomous_emergency_braking/src/node.cpp index 960aadf226b70..2b5553971a8d5 100644 
--- a/control/autonomous_emergency_braking/src/node.cpp +++ b/control/autonomous_emergency_braking/src/node.cpp @@ -309,7 +309,6 @@ bool AEB::checkCollision(MarkerArray & debug_markers) // step3. create ego path based on sensor data bool has_collision_ego = false; if (use_imu_path_) { - Path ego_path; std::vector ego_polys; const double current_w = angular_velocity_ptr_->z; constexpr double color_r = 0.0 / 256.0; @@ -317,8 +316,7 @@ bool AEB::checkCollision(MarkerArray & debug_markers) constexpr double color_b = 205.0 / 256.0; constexpr double color_a = 0.999; const auto current_time = get_clock()->now(); - generateEgoPath(current_v, current_w, ego_path, ego_polys); - + const auto ego_path = generateEgoPath(current_v, current_w, ego_polys); std::vector objects; createObjectData(ego_path, ego_polys, current_time, objects); has_collision_ego = hasCollision(current_v, ego_path, objects); @@ -332,7 +330,6 @@ bool AEB::checkCollision(MarkerArray & debug_markers) // step4. transform predicted trajectory from control module bool has_collision_predicted = false; if (use_predicted_trajectory_) { - Path predicted_path; std::vector predicted_polys; const auto predicted_traj_ptr = predicted_traj_ptr_; constexpr double color_r = 0.0; @@ -340,15 +337,18 @@ bool AEB::checkCollision(MarkerArray & debug_markers) constexpr double color_b = 0.0; constexpr double color_a = 0.999; const auto current_time = predicted_traj_ptr->header.stamp; - generateEgoPath(*predicted_traj_ptr, predicted_path, predicted_polys); - std::vector objects; - createObjectData(predicted_path, predicted_polys, current_time, objects); - has_collision_predicted = hasCollision(current_v, predicted_path, objects); - - std::string ns = "predicted"; - addMarker( - current_time, predicted_path, predicted_polys, objects, color_r, color_g, color_b, color_a, - ns, debug_markers); + const auto predicted_path_opt = generateEgoPath(*predicted_traj_ptr, predicted_polys); + if (predicted_path_opt) { + const auto & 
predicted_path = predicted_path_opt.value(); + std::vector objects; + createObjectData(predicted_path, predicted_polys, current_time, objects); + has_collision_predicted = hasCollision(current_v, predicted_path, objects); + + std::string ns = "predicted"; + addMarker( + current_time, predicted_path, predicted_polys, objects, color_r, color_g, color_b, color_a, + ns, debug_markers); + } } return has_collision_ego || has_collision_predicted; @@ -384,9 +384,10 @@ bool AEB::hasCollision( return false; } -void AEB::generateEgoPath( - const double curr_v, const double curr_w, Path & path, std::vector & polygons) +Path AEB::generateEgoPath( + const double curr_v, const double curr_w, std::vector & polygons) { + Path path; double curr_x = 0.0; double curr_y = 0.0; double curr_yaw = 0.0; @@ -397,7 +398,7 @@ void AEB::generateEgoPath( if (curr_v < 0.1) { // if current velocity is too small, assume it stops at the same point - return; + return path; } constexpr double epsilon = 1e-6; @@ -435,14 +436,14 @@ void AEB::generateEgoPath( for (size_t i = 0; i < path.size() - 1; ++i) { polygons.at(i) = createPolygon(path.at(i), path.at(i + 1), vehicle_info_, expand_width_); } + return path; } -void AEB::generateEgoPath( - const Trajectory & predicted_traj, Path & path, - std::vector & polygons) +std::optional AEB::generateEgoPath( + const Trajectory & predicted_traj, std::vector & polygons) { if (predicted_traj.points.empty()) { - return; + return std::nullopt; } geometry_msgs::msg::TransformStamped transform_stamped{}; @@ -452,10 +453,11 @@ void AEB::generateEgoPath( rclcpp::Duration::from_seconds(0.5)); } catch (tf2::TransformException & ex) { RCLCPP_ERROR_STREAM(get_logger(), "[AEB] Failed to look up transform from base_link to map"); - return; + return std::nullopt; } // create path + Path path; path.resize(predicted_traj.points.size()); for (size_t i = 0; i < predicted_traj.points.size(); ++i) { geometry_msgs::msg::Pose map_pose; @@ -471,6 +473,7 @@ void AEB::generateEgoPath( 
for (size_t i = 0; i < path.size() - 1; ++i) { polygons.at(i) = createPolygon(path.at(i), path.at(i + 1), vehicle_info_, expand_width_); } + return path; } void AEB::createObjectData( diff --git a/control/control_validator/config/control_validator.param.yaml b/control/control_validator/config/control_validator.param.yaml index 7bbe6a466799b..12709b18b7932 100644 --- a/control/control_validator/config/control_validator.param.yaml +++ b/control/control_validator/config/control_validator.param.yaml @@ -1,8 +1,6 @@ /**: ros__parameters: - publish_diag: true # if true, diagnostic msg is published - # If the number of consecutive invalid trajectory exceeds this threshold, the Diag will be set to ERROR. # (For example, threshold = 1 means, even if the trajectory is invalid, Diag will not be ERROR if # the next trajectory is valid.) diff --git a/control/control_validator/include/control_validator/control_validator.hpp b/control/control_validator/include/control_validator/control_validator.hpp index 48b7eba7412a2..eba9bf700ba08 100644 --- a/control/control_validator/include/control_validator/control_validator.hpp +++ b/control/control_validator/include/control_validator/control_validator.hpp @@ -75,7 +75,6 @@ class ControlValidator : public rclcpp::Node rclcpp::Publisher::SharedPtr pub_markers_; // system parameters - bool publish_diag_ = true; int diag_error_count_threshold_ = 0; bool display_on_terminal_ = true; diff --git a/control/control_validator/src/control_validator.cpp b/control/control_validator/src/control_validator.cpp index ffce38f009a41..5282e31fef898 100644 --- a/control/control_validator/src/control_validator.cpp +++ b/control/control_validator/src/control_validator.cpp @@ -46,14 +46,11 @@ ControlValidator::ControlValidator(const rclcpp::NodeOptions & options) setupParameters(); - if (publish_diag_) { - setupDiag(); - } + setupDiag(); } void ControlValidator::setupParameters() { - publish_diag_ = declare_parameter("publish_diag"); diag_error_count_threshold_ 
= declare_parameter("diag_error_count_threshold"); display_on_terminal_ = declare_parameter("display_on_terminal"); @@ -121,6 +118,11 @@ bool ControlValidator::isDataReady() void ControlValidator::onReferenceTrajectory(const Trajectory::ConstSharedPtr msg) { + if (msg->points.size() < 2) { + RCLCPP_ERROR(get_logger(), "planning trajectory size is invalid (%lu)", msg->points.size()); + return; + } + current_reference_trajectory_ = msg; return; diff --git a/control/joy_controller/config/joy_controller.param.yaml b/control/joy_controller/config/joy_controller.param.yaml index 73a5d028985c5..c9fe348867e68 100644 --- a/control/joy_controller/config/joy_controller.param.yaml +++ b/control/joy_controller/config/joy_controller.param.yaml @@ -1,6 +1,6 @@ /**: ros__parameters: - joy_type: DS4 + joy_type: $(var joy_type) update_rate: 10.0 accel_ratio: 3.0 brake_ratio: 5.0 diff --git a/control/joy_controller/launch/joy_controller.launch.xml b/control/joy_controller/launch/joy_controller.launch.xml index d2804a7339046..c392ca100a9c0 100644 --- a/control/joy_controller/launch/joy_controller.launch.xml +++ b/control/joy_controller/launch/joy_controller.launch.xml @@ -30,8 +30,7 @@ - - + diff --git a/control/pid_longitudinal_controller/include/pid_longitudinal_controller/longitudinal_controller_utils.hpp b/control/pid_longitudinal_controller/include/pid_longitudinal_controller/longitudinal_controller_utils.hpp index cc1c9c08887ba..ac4fec8dacb7d 100644 --- a/control/pid_longitudinal_controller/include/pid_longitudinal_controller/longitudinal_controller_utils.hpp +++ b/control/pid_longitudinal_controller/include/pid_longitudinal_controller/longitudinal_controller_utils.hpp @@ -30,6 +30,7 @@ #include #include +#include namespace autoware::motion::control::pid_longitudinal_controller { @@ -62,11 +63,11 @@ double getPitchByPose(const Quaternion & quaternion); * @brief calculate pitch angle from trajectory on map * NOTE: there is currently no z information so this always returns 0.0 * 
@param [in] trajectory input trajectory - * @param [in] closest_idx nearest index to current vehicle position + * @param [in] start_idx nearest index to current vehicle position * @param [in] wheel_base length of wheel base */ double getPitchByTraj( - const Trajectory & trajectory, const size_t closest_idx, const double wheel_base); + const Trajectory & trajectory, const size_t start_idx, const double wheel_base); /** * @brief calculate vehicle pose after time delay by moving the vehicle at current velocity and @@ -82,7 +83,7 @@ Pose calcPoseAfterTimeDelay( * @param [in] point Interpolated point is nearest to this point. */ template -TrajectoryPoint lerpTrajectoryPoint( +std::pair lerpTrajectoryPoint( const T & points, const Pose & pose, const double max_dist, const double max_yaw) { TrajectoryPoint interpolated_point; @@ -101,6 +102,8 @@ TrajectoryPoint lerpTrajectoryPoint( points.at(i).pose.position.x, points.at(i + 1).pose.position.x, interpolate_ratio); interpolated_point.pose.position.y = interpolation::lerp( points.at(i).pose.position.y, points.at(i + 1).pose.position.y, interpolate_ratio); + interpolated_point.pose.position.z = interpolation::lerp( + points.at(i).pose.position.z, points.at(i + 1).pose.position.z, interpolate_ratio); interpolated_point.pose.orientation = interpolation::lerpOrientation( points.at(i).pose.orientation, points.at(i + 1).pose.orientation, interpolate_ratio); interpolated_point.longitudinal_velocity_mps = interpolation::lerp( @@ -114,7 +117,7 @@ TrajectoryPoint lerpTrajectoryPoint( points.at(i).heading_rate_rps, points.at(i + 1).heading_rate_rps, interpolate_ratio); } - return interpolated_point; + return std::make_pair(interpolated_point, seg_idx); } /** @@ -138,6 +141,17 @@ double applyDiffLimitFilter( double applyDiffLimitFilter( const double input_val, const double prev_val, const double dt, const double max_val, const double min_val); + +/** + * @brief calculate the projected pose after distance from the current index + * 
@param [in] src_idx current index + * @param [in] distance distance to project + * @param [in] trajectory reference trajectory + */ +geometry_msgs::msg::Pose findTrajectoryPoseAfterDistance( + const size_t src_idx, const double distance, + const autoware_auto_planning_msgs::msg::Trajectory & trajectory); + } // namespace longitudinal_utils } // namespace autoware::motion::control::pid_longitudinal_controller diff --git a/control/pid_longitudinal_controller/include/pid_longitudinal_controller/pid_longitudinal_controller.hpp b/control/pid_longitudinal_controller/include/pid_longitudinal_controller/pid_longitudinal_controller.hpp index b4ac9e1506eb7..8df7cb3b9b3f8 100644 --- a/control/pid_longitudinal_controller/include/pid_longitudinal_controller/pid_longitudinal_controller.hpp +++ b/control/pid_longitudinal_controller/include/pid_longitudinal_controller/pid_longitudinal_controller.hpp @@ -66,13 +66,25 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro double vel{0.0}; double acc{0.0}; }; - + struct StateAfterDelay + { + StateAfterDelay(const double velocity, const double acceleration, const double distance) + : vel(velocity), acc(acceleration), running_distance(distance) + { + } + double vel{0.0}; + double acc{0.0}; + double running_distance{0.0}; + }; enum class Shift { Forward = 0, Reverse }; struct ControlData { bool is_far_from_trajectory{false}; + autoware_auto_planning_msgs::msg::Trajectory interpolated_traj{}; size_t nearest_idx{0}; // nearest_idx = 0 when nearest_idx is not found with findNearestIdx + size_t target_idx{0}; + StateAfterDelay state_after_delay{0.0, 0.0, 0.0}; Motion current_motion{}; Shift shift{Shift::Forward}; // shift is used only to calculate the sign of pitch compensation double stop_dist{0.0}; // signed distance that is positive when car is before the stopline @@ -184,7 +196,9 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro double m_min_jerk; // slope compensation - 
bool m_use_traj_for_pitch; + enum class SlopeSource { RAW_PITCH = 0, TRAJECTORY_PITCH, TRAJECTORY_ADAPTIVE }; + SlopeSource m_slope_source{SlopeSource::RAW_PITCH}; + double m_adaptive_trajectory_velocity_th; std::shared_ptr m_lpf_pitch{nullptr}; double m_max_pitch_rad; double m_min_pitch_rad; @@ -276,11 +290,9 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro /** * @brief calculate control command based on the current control state - * @param [in] current_pose current ego pose * @param [in] control_data control data */ - Motion calcCtrlCmd( - const geometry_msgs::msg::Pose & current_pose, const ControlData & control_data); + Motion calcCtrlCmd(const ControlData & control_data); /** * @brief publish control command @@ -309,9 +321,9 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro /** * @brief calculate direction (forward or backward) that vehicle moves - * @param [in] nearest_idx nearest index on trajectory to vehicle + * @param [in] control_data data for control calculation */ - enum Shift getCurrentShift(const size_t nearest_idx) const; + enum Shift getCurrentShift(const ControlData & control_data) const; /** * @brief filter acceleration command with limitation of acceleration and jerk, and slope @@ -341,8 +353,7 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro * @param [in] motion delay compensated target motion */ Motion keepBrakeBeforeStop( - const autoware_auto_planning_msgs::msg::Trajectory & traj, const Motion & target_motion, - const size_t nearest_idx) const; + const ControlData & control_data, const Motion & target_motion, const size_t nearest_idx) const; /** * @brief interpolate trajectory point that is nearest to vehicle @@ -350,7 +361,8 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro * @param [in] point vehicle position * @param [in] nearest_idx index of the trajectory point nearest to the vehicle position */ - 
autoware_auto_planning_msgs::msg::TrajectoryPoint calcInterpolatedTargetValue( + std::pair + calcInterpolatedTrajPointAndSegment( const autoware_auto_planning_msgs::msg::Trajectory & traj, const geometry_msgs::msg::Pose & pose) const; @@ -359,18 +371,14 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro * @param [in] current_motion current velocity and acceleration of the vehicle * @param [in] delay_compensation_time predicted time delay */ - double predictedVelocityInTargetPoint( + StateAfterDelay predictedStateAfterDelay( const Motion current_motion, const double delay_compensation_time) const; /** * @brief calculate velocity feedback with feed forward and pid controller - * @param [in] target_motion reference velocity and acceleration. This acceleration will be used - * as feed forward. - * @param [in] dt time step to use - * @param [in] current_vel current velocity of the vehicle + * @param [in] control_data data for control calculation */ - double applyVelocityFeedback( - const Motion target_motion, const double dt, const double current_vel, const Shift & shift); + double applyVelocityFeedback(const ControlData & control_data); /** * @brief update variables for debugging about pitch @@ -383,12 +391,9 @@ class PidLongitudinalController : public trajectory_follower::LongitudinalContro /** * @brief update variables for velocity and acceleration * @param [in] ctrl_cmd latest calculated control command - * @param [in] current_pose current pose of the vehicle * @param [in] control_data data for control calculation */ - void updateDebugVelAcc( - const Motion & ctrl_cmd, const geometry_msgs::msg::Pose & current_pose, - const ControlData & control_data); + void updateDebugVelAcc(const ControlData & control_data); double getTimeUnderControl(); }; diff --git a/control/pid_longitudinal_controller/package.xml b/control/pid_longitudinal_controller/package.xml index 0c2da18f9185c..959aca689816a 100644 --- 
a/control/pid_longitudinal_controller/package.xml +++ b/control/pid_longitudinal_controller/package.xml @@ -7,6 +7,7 @@ Takamasa Horibe Takayuki Murooka + Mamoru Sobue Apache 2.0 diff --git a/control/pid_longitudinal_controller/param/longitudinal_controller_defaults.param.yaml b/control/pid_longitudinal_controller/param/longitudinal_controller_defaults.param.yaml index f1ddea7ebad88..e95428096c17c 100644 --- a/control/pid_longitudinal_controller/param/longitudinal_controller_defaults.param.yaml +++ b/control/pid_longitudinal_controller/param/longitudinal_controller_defaults.param.yaml @@ -69,8 +69,9 @@ max_jerk: 2.0 min_jerk: -5.0 - # pitch - use_trajectory_for_pitch_calculation: false + # slope compensation lpf_pitch_gain: 0.95 + slope_source: "raw_pitch" # raw_pitch, trajectory_pitch or trajectory_adaptive + adaptive_trajectory_velocity_th: 1.0 max_pitch_rad: 0.1 min_pitch_rad: -0.1 diff --git a/control/pid_longitudinal_controller/src/longitudinal_controller_utils.cpp b/control/pid_longitudinal_controller/src/longitudinal_controller_utils.cpp index 33a54663ccaef..28cd6e1824859 100644 --- a/control/pid_longitudinal_controller/src/longitudinal_controller_utils.cpp +++ b/control/pid_longitudinal_controller/src/longitudinal_controller_utils.cpp @@ -84,7 +84,7 @@ double getPitchByPose(const Quaternion & quaternion_msg) } double getPitchByTraj( - const Trajectory & trajectory, const size_t nearest_idx, const double wheel_base) + const Trajectory & trajectory, const size_t start_idx, const double wheel_base) { // cannot calculate pitch if (trajectory.points.size() <= 1) { @@ -92,17 +92,17 @@ double getPitchByTraj( } const auto [prev_idx, next_idx] = [&]() { - for (size_t i = nearest_idx + 1; i < trajectory.points.size(); ++i) { - const double dist = tier4_autoware_utils::calcDistance2d( - trajectory.points.at(nearest_idx), trajectory.points.at(i)); + for (size_t i = start_idx + 1; i < trajectory.points.size(); ++i) { + const double dist = 
tier4_autoware_utils::calcDistance3d( + trajectory.points.at(start_idx), trajectory.points.at(i)); if (dist > wheel_base) { // calculate pitch from trajectory between rear wheel (nearest) and front center (i) - return std::make_pair(nearest_idx, i); + return std::make_pair(start_idx, i); } } // NOTE: The ego pose is close to the goal. return std::make_pair( - std::min(nearest_idx, trajectory.points.size() - 2), trajectory.points.size() - 1); + std::min(start_idx, trajectory.points.size() - 2), trajectory.points.size() - 1); }(); return tier4_autoware_utils::calcElevationAngle( @@ -158,5 +158,33 @@ double applyDiffLimitFilter( const double min_val = -max_val; return applyDiffLimitFilter(input_val, prev_val, dt, max_val, min_val); } + +geometry_msgs::msg::Pose findTrajectoryPoseAfterDistance( + const size_t src_idx, const double distance, + const autoware_auto_planning_msgs::msg::Trajectory & trajectory) +{ + double remain_dist = distance; + geometry_msgs::msg::Pose p = trajectory.points.back().pose; + for (size_t i = src_idx; i < trajectory.points.size() - 1; ++i) { + const double dist = tier4_autoware_utils::calcDistance3d( + trajectory.points.at(i).pose, trajectory.points.at(i + 1).pose); + if (remain_dist < dist) { + if (remain_dist <= 0.0) { + return trajectory.points.at(i).pose; + } + double ratio = remain_dist / dist; + const auto p0 = trajectory.points.at(i).pose; + const auto p1 = trajectory.points.at(i + 1).pose; + p = trajectory.points.at(i).pose; + p.position.x = interpolation::lerp(p0.position.x, p1.position.x, ratio); + p.position.y = interpolation::lerp(p0.position.y, p1.position.y, ratio); + p.position.z = interpolation::lerp(p0.position.z, p1.position.z, ratio); + p.orientation = interpolation::lerpOrientation(p0.orientation, p1.orientation, ratio); + break; + } + remain_dist -= dist; + } + return p; +} } // namespace longitudinal_utils } // namespace autoware::motion::control::pid_longitudinal_controller diff --git 
a/control/pid_longitudinal_controller/src/pid_longitudinal_controller.cpp b/control/pid_longitudinal_controller/src/pid_longitudinal_controller.cpp index 31c48fb5060a8..aab2ecf8f081e 100644 --- a/control/pid_longitudinal_controller/src/pid_longitudinal_controller.cpp +++ b/control/pid_longitudinal_controller/src/pid_longitudinal_controller.cpp @@ -168,12 +168,27 @@ PidLongitudinalController::PidLongitudinalController(rclcpp::Node & node) m_min_jerk = node.declare_parameter("min_jerk"); // [m/s^3] // parameters for slope compensation - m_use_traj_for_pitch = node.declare_parameter("use_trajectory_for_pitch_calculation"); + m_adaptive_trajectory_velocity_th = + node.declare_parameter("adaptive_trajectory_velocity_th"); // [m/s^2] const double lpf_pitch_gain{node.declare_parameter("lpf_pitch_gain")}; m_lpf_pitch = std::make_shared(0.0, lpf_pitch_gain); m_max_pitch_rad = node.declare_parameter("max_pitch_rad"); // [rad] m_min_pitch_rad = node.declare_parameter("min_pitch_rad"); // [rad] + // check slope source is proper + const std::string slope_source = node.declare_parameter( + "slope_source"); // raw_pitch, trajectory_pitch or trajectory_adaptive + if (slope_source == "raw_pitch") { + m_slope_source = SlopeSource::RAW_PITCH; + } else if (slope_source == "trajectory_pitch") { + m_slope_source = SlopeSource::TRAJECTORY_PITCH; + } else if (slope_source == "trajectory_adaptive") { + m_slope_source = SlopeSource::TRAJECTORY_ADAPTIVE; + } else { + RCLCPP_ERROR(logger_, "Slope source is not valid. 
Using raw_pitch option as default"); + m_slope_source = SlopeSource::RAW_PITCH; + } + // ego nearest index search m_ego_nearest_dist_threshold = node.has_parameter("ego_nearest_dist_threshold") @@ -355,6 +370,7 @@ rcl_interfaces::msg::SetParametersResult PidLongitudinalController::paramCallbac // slope compensation update_param("max_pitch_rad", m_max_pitch_rad); update_param("min_pitch_rad", m_min_pitch_rad); + update_param("adaptive_trajectory_velocity_th", m_adaptive_trajectory_velocity_th); rcl_interfaces::msg::SetParametersResult result; result.successful = true; @@ -401,7 +417,7 @@ trajectory_follower::LongitudinalOutput PidLongitudinalController::run( updateControlState(control_data); // calculate control command - const Motion ctrl_cmd = calcCtrlCmd(current_pose, control_data); + const Motion ctrl_cmd = calcCtrlCmd(control_data); // publish control command const auto cmd_msg = createCtrlCmdMsg(ctrl_cmd, control_data.current_motion.vel); @@ -428,19 +444,29 @@ PidLongitudinalController::ControlData PidLongitudinalController::getControlData // current velocity and acceleration control_data.current_motion.vel = m_current_kinematic_state.twist.twist.linear.x; control_data.current_motion.acc = m_current_accel.accel.accel.linear.x; + control_data.interpolated_traj = m_trajectory; - // nearest idx - const size_t nearest_idx = motion_utils::findFirstNearestIndexWithSoftConstraints( - m_trajectory.points, current_pose, m_ego_nearest_dist_threshold, m_ego_nearest_yaw_threshold); - const auto & nearest_point = m_trajectory.points.at(nearest_idx).pose; + // calculate the interpolated point and segment + const auto current_interpolated_pose = + calcInterpolatedTrajPointAndSegment(control_data.interpolated_traj, current_pose); + + // Insert the interpolated point + control_data.interpolated_traj.points.insert( + control_data.interpolated_traj.points.begin() + current_interpolated_pose.second + 1, + current_interpolated_pose.first); + control_data.nearest_idx = 
current_interpolated_pose.second + 1; + control_data.target_idx = control_data.nearest_idx; + const auto nearest_point = current_interpolated_pose.first; + auto target_point = current_interpolated_pose.first; // check if the deviation is worth emergency m_diagnostic_data.trans_deviation = - tier4_autoware_utils::calcDistance2d(nearest_point, current_pose); + tier4_autoware_utils::calcDistance2d(current_interpolated_pose.first, current_pose); const bool is_dist_deviation_large = m_state_transition_params.emergency_state_traj_trans_dev < m_diagnostic_data.trans_deviation; m_diagnostic_data.rot_deviation = std::abs(tier4_autoware_utils::normalizeRadian( - tf2::getYaw(nearest_point.orientation) - tf2::getYaw(current_pose.orientation))); + tf2::getYaw(current_interpolated_pose.first.pose.orientation) - + tf2::getYaw(current_pose.orientation))); const bool is_yaw_deviation_large = m_state_transition_params.emergency_state_traj_rot_dev < m_diagnostic_data.rot_deviation; @@ -449,10 +475,52 @@ PidLongitudinalController::ControlData PidLongitudinalController::getControlData control_data.is_far_from_trajectory = true; return control_data; } - control_data.nearest_idx = nearest_idx; + + // Delay compensation - Calculate the distance we got, predicted velocity and predicted + // acceleration after delay + control_data.state_after_delay = + predictedStateAfterDelay(control_data.current_motion, m_delay_compensation_time); + + // calculate the target motion for delay compensation + constexpr double min_running_dist = 0.01; + if (control_data.state_after_delay.running_distance > min_running_dist) { + const auto target_pose = longitudinal_utils::findTrajectoryPoseAfterDistance( + control_data.nearest_idx, control_data.state_after_delay.running_distance, + control_data.interpolated_traj); + const auto target_interpolated_point = + calcInterpolatedTrajPointAndSegment(control_data.interpolated_traj, target_pose); + control_data.target_idx = target_interpolated_point.second + 1; + 
control_data.interpolated_traj.points.insert( + control_data.interpolated_traj.points.begin() + control_data.target_idx, + target_interpolated_point.first); + target_point = target_interpolated_point.first; + } + + // ========================================================================================== + // NOTE: due to removeOverlapPoints(), the obtained control_data.target_idx and + // control_data.nearest_idx may become invalid if the number of points decreased. + // current API does not provide the way to check duplication beforehand and this function + // does not tell how many/which index points were removed, so there is no way + // to tell if our `control_data.target_idx` point still exists or removed. + // ========================================================================================== + // Remove overlapped points after inserting the interpolated points + control_data.interpolated_traj.points = + motion_utils::removeOverlapPoints(control_data.interpolated_traj.points); + control_data.nearest_idx = motion_utils::findFirstNearestIndexWithSoftConstraints( + control_data.interpolated_traj.points, nearest_point.pose, m_ego_nearest_dist_threshold, + m_ego_nearest_yaw_threshold); + control_data.target_idx = motion_utils::findFirstNearestIndexWithSoftConstraints( + control_data.interpolated_traj.points, target_point.pose, m_ego_nearest_dist_threshold, + m_ego_nearest_yaw_threshold); + + // send debug values + m_debug_values.setValues(DebugValues::TYPE::PREDICTED_VEL, control_data.state_after_delay.vel); + m_debug_values.setValues( + DebugValues::TYPE::TARGET_VEL, + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps); // shift - control_data.shift = getCurrentShift(control_data.nearest_idx); + control_data.shift = getCurrentShift(control_data); if (control_data.shift != m_prev_shift) { m_pid_vel.reset(); } @@ -460,15 +528,34 @@ PidLongitudinalController::ControlData PidLongitudinalController::getControlData // 
distance to stopline control_data.stop_dist = longitudinal_utils::calcStopDistance( - current_pose, m_trajectory, m_ego_nearest_dist_threshold, m_ego_nearest_yaw_threshold); + control_data.interpolated_traj.points.at(control_data.nearest_idx).pose, + control_data.interpolated_traj, m_ego_nearest_dist_threshold, m_ego_nearest_yaw_threshold); // pitch // NOTE: getPitchByTraj() calculates the pitch angle as defined in // ../media/slope_definition.drawio.svg while getPitchByPose() is not, so `raw_pitch` is reversed const double raw_pitch = (-1.0) * longitudinal_utils::getPitchByPose(current_pose.orientation); - const double traj_pitch = - longitudinal_utils::getPitchByTraj(m_trajectory, control_data.nearest_idx, m_wheel_base); - control_data.slope_angle = m_use_traj_for_pitch ? traj_pitch : m_lpf_pitch->filter(raw_pitch); + const double traj_pitch = longitudinal_utils::getPitchByTraj( + control_data.interpolated_traj, control_data.target_idx, m_wheel_base); + + if (m_slope_source == SlopeSource::RAW_PITCH) { + control_data.slope_angle = m_lpf_pitch->filter(raw_pitch); + } else if (m_slope_source == SlopeSource::TRAJECTORY_PITCH) { + control_data.slope_angle = traj_pitch; + } else if (m_slope_source == SlopeSource::TRAJECTORY_ADAPTIVE) { + // if velocity is high, use target idx for slope, otherwise, use raw_pitch + if (control_data.current_motion.vel > m_adaptive_trajectory_velocity_th) { + control_data.slope_angle = traj_pitch; + m_lpf_pitch->filter(raw_pitch); + } else { + control_data.slope_angle = m_lpf_pitch->filter(raw_pitch); + } + } else { + RCLCPP_ERROR_THROTTLE( + logger_, *clock_, 3000, "Slope source is not valid. Using raw_pitch option as default"); + control_data.slope_angle = m_lpf_pitch->filter(raw_pitch); + } + updatePitchDebugValues(control_data.slope_angle, traj_pitch, raw_pitch); return control_data; @@ -539,8 +626,12 @@ void PidLongitudinalController::updateControlState(const ControlData & control_d ? 
(clock_->now() - *m_last_running_time).seconds() > p.stopped_state_entry_duration_time : false; - const double current_vel_cmd = - std::fabs(m_trajectory.points.at(control_data.nearest_idx).longitudinal_velocity_mps); + // ========================================================================================== + // NOTE: due to removeOverlapPoints() in getControlData() m_trajectory and + // control_data.interpolated_traj have different size. + // ========================================================================================== + const double current_vel_cmd = std::fabs( + control_data.interpolated_traj.points.at(control_data.nearest_idx).longitudinal_velocity_mps); const bool emergency_condition = m_enable_overshoot_emergency && stop_dist < -p.emergency_state_overshoot_stop_dist && current_vel_cmd < vel_epsilon; @@ -555,7 +646,7 @@ void PidLongitudinalController::updateControlState(const ControlData & control_d return; }; - const auto info_once = [this](const auto & s) { RCLCPP_INFO_ONCE(logger_, "%s", s); }; + const auto debug_msg_once = [this](const auto & s) { RCLCPP_DEBUG_ONCE(logger_, "%s", s); }; const bool is_under_control = m_current_operation_mode.is_autoware_control_enabled && m_current_operation_mode.mode == OperationModeState::AUTONOMOUS; @@ -581,8 +672,7 @@ void PidLongitudinalController::updateControlState(const ControlData & control_d if (m_enable_smooth_stop) { if (stopping_condition) { // predictions after input time delay - const double pred_vel_in_target = - predictedVelocityInTargetPoint(control_data.current_motion, m_delay_compensation_time); + const double pred_vel_in_target = control_data.state_after_delay.vel; const double pred_stop_dist = control_data.stop_dist - 0.5 * (pred_vel_in_target + current_vel) * m_delay_compensation_time; @@ -621,10 +711,10 @@ void PidLongitudinalController::updateControlState(const ControlData & control_d if (m_control_state == ControlState::STOPPED) { // -- debug print -- if (has_nonzero_target_vel && 
!departure_condition_from_stopped) { - info_once("target speed > 0, but departure condition is not met. Keep STOPPED."); + debug_msg_once("target speed > 0, but departure condition is not met. Keep STOPPED."); } if (has_nonzero_target_vel && keep_stopped_condition) { - info_once("target speed > 0, but keep stop condition is met. Keep STOPPED."); + debug_msg_once("target speed > 0, but keep stop condition is met. Keep STOPPED."); } // --------------- @@ -662,41 +752,32 @@ void PidLongitudinalController::updateControlState(const ControlData & control_d } PidLongitudinalController::Motion PidLongitudinalController::calcCtrlCmd( - const geometry_msgs::msg::Pose & current_pose, const ControlData & control_data) + const ControlData & control_data) { - const size_t nearest_idx = control_data.nearest_idx; - const double current_vel = control_data.current_motion.vel; - const double current_acc = control_data.current_motion.acc; + const size_t target_idx = control_data.target_idx; // velocity and acceleration command - Motion raw_ctrl_cmd{}; - Motion target_motion{}; + Motion raw_ctrl_cmd{ + control_data.interpolated_traj.points.at(target_idx).longitudinal_velocity_mps, + control_data.interpolated_traj.points.at(target_idx).acceleration_mps2}; + if (m_control_state == ControlState::DRIVE) { - const auto target_pose = longitudinal_utils::calcPoseAfterTimeDelay( - current_pose, m_delay_compensation_time, current_vel, current_acc); - const auto target_interpolated_point = calcInterpolatedTargetValue(m_trajectory, target_pose); - target_motion = Motion{ - target_interpolated_point.longitudinal_velocity_mps, - target_interpolated_point.acceleration_mps2}; - - target_motion = keepBrakeBeforeStop(m_trajectory, target_motion, nearest_idx); - - const double pred_vel_in_target = - predictedVelocityInTargetPoint(control_data.current_motion, m_delay_compensation_time); - m_debug_values.setValues(DebugValues::TYPE::PREDICTED_VEL, pred_vel_in_target); - - raw_ctrl_cmd.vel = 
target_motion.vel; - raw_ctrl_cmd.acc = - applyVelocityFeedback(target_motion, control_data.dt, pred_vel_in_target, control_data.shift); + raw_ctrl_cmd.vel = + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps; + raw_ctrl_cmd.acc = applyVelocityFeedback(control_data); + raw_ctrl_cmd = keepBrakeBeforeStop(control_data, raw_ctrl_cmd, target_idx); + RCLCPP_DEBUG( logger_, "[feedback control] vel: %3.3f, acc: %3.3f, dt: %3.3f, v_curr: %3.3f, v_ref: %3.3f " "feedback_ctrl_cmd.ac: %3.3f", - raw_ctrl_cmd.vel, raw_ctrl_cmd.acc, control_data.dt, current_vel, target_motion.vel, + raw_ctrl_cmd.vel, raw_ctrl_cmd.acc, control_data.dt, control_data.current_motion.vel, + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps, raw_ctrl_cmd.acc); } else if (m_control_state == ControlState::STOPPING) { raw_ctrl_cmd.acc = m_smooth_stop.calculate( - control_data.stop_dist, current_vel, current_acc, m_vel_hist, m_delay_compensation_time); + control_data.stop_dist, control_data.current_motion.vel, control_data.current_motion.acc, + m_vel_hist, m_delay_compensation_time); raw_ctrl_cmd.vel = m_stopped_state_params.vel; RCLCPP_DEBUG( @@ -722,7 +803,7 @@ PidLongitudinalController::Motion PidLongitudinalController::calcCtrlCmd( const Motion filtered_ctrl_cmd{raw_ctrl_cmd.vel, filtered_acc_cmd}; // update debug visualization - updateDebugVelAcc(target_motion, current_pose, control_data); + updateDebugVelAcc(control_data); return filtered_ctrl_cmd; } @@ -739,7 +820,8 @@ autoware_auto_control_msgs::msg::LongitudinalCommand PidLongitudinalController:: // store current velocity history m_vel_hist.push_back({clock_->now(), current_vel}); - while (m_vel_hist.size() > static_cast(0.5 / m_longitudinal_ctrl_period)) { + while (m_vel_hist.size() > + static_cast(m_delay_compensation_time / m_longitudinal_ctrl_period)) { m_vel_hist.erase(m_vel_hist.begin()); } @@ -791,11 +873,12 @@ double PidLongitudinalController::getDt() } 
enum PidLongitudinalController::Shift PidLongitudinalController::getCurrentShift( - const size_t nearest_idx) const + const ControlData & control_data) const { constexpr double epsilon = 1e-5; - const double target_vel = m_trajectory.points.at(nearest_idx).longitudinal_velocity_mps; + const double target_vel = + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps; if (target_vel > epsilon) { return Shift::Forward; @@ -815,6 +898,7 @@ double PidLongitudinalController::calcFilteredAcc( // store ctrl cmd without slope filter storeAccelCmd(acc_max_filtered); + // apply slope compensation const double acc_slope_filtered = applySlopeCompensation(acc_max_filtered, control_data.slope_angle, control_data.shift); m_debug_values.setValues(DebugValues::TYPE::ACC_CMD_SLOPE_APPLIED, acc_slope_filtered); @@ -866,15 +950,15 @@ double PidLongitudinalController::applySlopeCompensation( } PidLongitudinalController::Motion PidLongitudinalController::keepBrakeBeforeStop( - const autoware_auto_planning_msgs::msg::Trajectory & traj, const Motion & target_motion, - const size_t nearest_idx) const + const ControlData & control_data, const Motion & target_motion, const size_t nearest_idx) const { Motion output_motion = target_motion; if (m_enable_brake_keeping_before_stop == false) { return output_motion; } - // const auto stop_idx = motion_utils::searchZeroVelocityIndex(traj.points); + const auto traj = control_data.interpolated_traj; + const auto stop_idx = motion_utils::searchZeroVelocityIndex(traj.points); if (!stop_idx) { return output_motion; @@ -899,13 +983,13 @@ PidLongitudinalController::Motion PidLongitudinalController::keepBrakeBeforeStop return output_motion; } -autoware_auto_planning_msgs::msg::TrajectoryPoint -PidLongitudinalController::calcInterpolatedTargetValue( +std::pair +PidLongitudinalController::calcInterpolatedTrajPointAndSegment( const autoware_auto_planning_msgs::msg::Trajectory & traj, const geometry_msgs::msg::Pose & pose) 
const { if (traj.points.size() == 1) { - return traj.points.at(0); + return std::make_pair(traj.points.at(0), 0); } // apply linear interpolation @@ -913,59 +997,69 @@ PidLongitudinalController::calcInterpolatedTargetValue( traj.points, pose, m_ego_nearest_dist_threshold, m_ego_nearest_yaw_threshold); } -double PidLongitudinalController::predictedVelocityInTargetPoint( +PidLongitudinalController::StateAfterDelay PidLongitudinalController::predictedStateAfterDelay( const Motion current_motion, const double delay_compensation_time) const { const double current_vel = current_motion.vel; const double current_acc = current_motion.acc; - - if (std::fabs(current_vel) < 1e-01) { // when velocity is low, no prediction - return current_vel; - } - - const double current_vel_abs = std::fabs(current_vel); - if (m_ctrl_cmd_vec.size() == 0) { - const double pred_vel = current_vel + current_acc * delay_compensation_time; + double running_distance = 0.0; + double pred_vel = current_vel; + double pred_acc = current_acc; + + if (m_ctrl_cmd_vec.empty() || m_current_operation_mode.mode != OperationModeState::AUTONOMOUS) { + // check time to stop + const double time_to_stop = -current_vel / current_acc; + const double delay_time_calculation = + time_to_stop > 0.0 && time_to_stop < delay_compensation_time ? time_to_stop + : delay_compensation_time; + // simple linear prediction + pred_vel = current_vel + current_acc * delay_time_calculation; + running_distance = std::abs( + delay_time_calculation * current_vel + + 0.5 * current_acc * delay_time_calculation * delay_time_calculation); // avoid to change sign of current_vel and pred_vel - return pred_vel > 0 ? 
std::copysign(pred_vel, current_vel) : 0.0; + return StateAfterDelay{pred_vel, pred_acc, running_distance}; } - double pred_vel = current_vel_abs; - - const auto past_delay_time = - clock_->now() - rclcpp::Duration::from_seconds(delay_compensation_time); for (std::size_t i = 0; i < m_ctrl_cmd_vec.size(); ++i) { - if ((clock_->now() - m_ctrl_cmd_vec.at(i).stamp).seconds() < m_delay_compensation_time) { - if (i == 0) { - // size of m_ctrl_cmd_vec is less than m_delay_compensation_time - pred_vel = current_vel_abs + - static_cast(m_ctrl_cmd_vec.at(i).acceleration) * delay_compensation_time; - return pred_vel > 0 ? std::copysign(pred_vel, current_vel) : 0.0; - } + if ((clock_->now() - m_ctrl_cmd_vec.at(i).stamp).seconds() < delay_compensation_time) { // add velocity to accel * dt - const double acc = m_ctrl_cmd_vec.at(i - 1).acceleration; - const auto curr_time_i = rclcpp::Time(m_ctrl_cmd_vec.at(i).stamp); - const double time_to_next_acc = std::min( - (curr_time_i - rclcpp::Time(m_ctrl_cmd_vec.at(i - 1).stamp)).seconds(), - (curr_time_i - past_delay_time).seconds()); - pred_vel += acc * time_to_next_acc; + const double time_to_next_acc = + (i == m_ctrl_cmd_vec.size() - 1) + ? std::min( + (clock_->now() - m_ctrl_cmd_vec.back().stamp).seconds(), delay_compensation_time) + : std::min( + (rclcpp::Time(m_ctrl_cmd_vec.at(i + 1).stamp) - + rclcpp::Time(m_ctrl_cmd_vec.at(i).stamp)) + .seconds(), + delay_compensation_time); + const double acc = m_ctrl_cmd_vec.at(i).acceleration; + // because acc_cmd is positive when vehicle is running backward + pred_acc = std::copysignf(1.0, static_cast(pred_vel)) * acc; + running_distance += std::abs( + std::abs(pred_vel) * time_to_next_acc + 0.5 * acc * time_to_next_acc * time_to_next_acc); + pred_vel += pred_vel < 0.0 ? 
(-acc * time_to_next_acc) : (acc * time_to_next_acc); + if (pred_vel / current_vel < 0.0) { + // sign of velocity is changed + pred_vel = 0.0; + break; + } } } - const double last_acc = m_ctrl_cmd_vec.at(m_ctrl_cmd_vec.size() - 1).acceleration; - const double time_to_current = - (clock_->now() - m_ctrl_cmd_vec.at(m_ctrl_cmd_vec.size() - 1).stamp).seconds(); - pred_vel += last_acc * time_to_current; - - // avoid to change sign of current_vel and pred_vel - return pred_vel > 0 ? std::copysign(pred_vel, current_vel) : 0.0; + return StateAfterDelay{pred_vel, pred_acc, running_distance}; } -double PidLongitudinalController::applyVelocityFeedback( - const Motion target_motion, const double dt, const double current_vel, const Shift & shift) +double PidLongitudinalController::applyVelocityFeedback(const ControlData & control_data) { // NOTE: Acceleration command is always positive even if the ego drives backward. - const double vel_sign = (shift == Shift::Forward) ? 1.0 : (shift == Shift::Reverse ? -1.0 : 0.0); + const double vel_sign = (control_data.shift == Shift::Forward) + ? 1.0 + : (control_data.shift == Shift::Reverse ? 
-1.0 : 0.0); + const double current_vel = control_data.current_motion.vel; + const auto target_motion = Motion{ + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps, + control_data.interpolated_traj.points.at(control_data.target_idx).acceleration_mps2}; const double diff_vel = (target_motion.vel - current_vel) * vel_sign; const bool is_under_control = m_current_operation_mode.is_autoware_control_enabled && m_current_operation_mode.mode == OperationModeState::AUTONOMOUS; @@ -978,11 +1072,12 @@ double PidLongitudinalController::applyVelocityFeedback( const bool enable_integration = (vehicle_is_moving || (m_enable_integration_at_low_speed && vehicle_is_stuck)) && is_under_control; + const double error_vel_filtered = m_lpf_vel_error->filter(diff_vel); std::vector pid_contributions(3); const double pid_acc = - m_pid_vel.calculate(error_vel_filtered, dt, enable_integration, pid_contributions); + m_pid_vel.calculate(error_vel_filtered, control_data.dt, enable_integration, pid_contributions); // Feedforward scaling: // This is for the coordinate conversion where feedforward is applied, from Time to Arclength. 
@@ -993,7 +1088,8 @@ double PidLongitudinalController::applyVelocityFeedback( constexpr double ff_scale_min = 0.5; // for safety const double ff_scale = std::clamp( std::abs(current_vel) / std::max(std::abs(target_motion.vel), 0.1), ff_scale_min, ff_scale_max); - const double ff_acc = target_motion.acc * ff_scale; + const double ff_acc = + control_data.interpolated_traj.points.at(control_data.target_idx).acceleration_mps2 * ff_scale; const double feedback_acc = ff_acc + pid_acc; @@ -1020,21 +1116,25 @@ void PidLongitudinalController::updatePitchDebugValues( m_debug_values.setValues(DebugValues::TYPE::PITCH_RAW_TRAJ_DEG, traj_pitch * to_degrees); } -void PidLongitudinalController::updateDebugVelAcc( - const Motion & target_motion, const geometry_msgs::msg::Pose & current_pose, - const ControlData & control_data) +void PidLongitudinalController::updateDebugVelAcc(const ControlData & control_data) { - const double current_vel = control_data.current_motion.vel; - - const auto interpolated_point = calcInterpolatedTargetValue(m_trajectory, current_pose); - - m_debug_values.setValues(DebugValues::TYPE::CURRENT_VEL, current_vel); - m_debug_values.setValues(DebugValues::TYPE::TARGET_VEL, target_motion.vel); - m_debug_values.setValues(DebugValues::TYPE::TARGET_ACC, target_motion.acc); + m_debug_values.setValues(DebugValues::TYPE::CURRENT_VEL, control_data.current_motion.vel); + m_debug_values.setValues( + DebugValues::TYPE::TARGET_VEL, + control_data.interpolated_traj.points.at(control_data.target_idx).longitudinal_velocity_mps); + m_debug_values.setValues( + DebugValues::TYPE::TARGET_ACC, + control_data.interpolated_traj.points.at(control_data.target_idx).acceleration_mps2); + m_debug_values.setValues( + DebugValues::TYPE::NEAREST_VEL, + control_data.interpolated_traj.points.at(control_data.nearest_idx).longitudinal_velocity_mps); + m_debug_values.setValues( + DebugValues::TYPE::NEAREST_ACC, + 
control_data.interpolated_traj.points.at(control_data.nearest_idx).acceleration_mps2); m_debug_values.setValues( - DebugValues::TYPE::NEAREST_VEL, interpolated_point.longitudinal_velocity_mps); - m_debug_values.setValues(DebugValues::TYPE::NEAREST_ACC, interpolated_point.acceleration_mps2); - m_debug_values.setValues(DebugValues::TYPE::ERROR_VEL, target_motion.vel - current_vel); + DebugValues::TYPE::ERROR_VEL, + control_data.interpolated_traj.points.at(control_data.nearest_idx).longitudinal_velocity_mps - + control_data.current_motion.vel); } void PidLongitudinalController::setupDiagnosticUpdater() diff --git a/control/pid_longitudinal_controller/test/test_longitudinal_controller_utils.cpp b/control/pid_longitudinal_controller/test/test_longitudinal_controller_utils.cpp index 58aa867b2deab..02bcce8c91c4b 100644 --- a/control/pid_longitudinal_controller/test/test_longitudinal_controller_utils.cpp +++ b/control/pid_longitudinal_controller/test/test_longitudinal_controller_utils.cpp @@ -132,32 +132,30 @@ TEST(TestLongitudinalControllerUtils, getPitchByTraj) point.pose.position.z = 0.0; traj.points.push_back(point); // non stopping trajectory: stop distance = trajectory length - point.pose.position.x = 1.0; + point.pose.position.x = 0.6; point.pose.position.y = 0.0; - point.pose.position.z = 1.0; + point.pose.position.z = 0.8; traj.points.push_back(point); - point.pose.position.x = 2.0; + point.pose.position.x = 1.2; point.pose.position.y = 0.0; point.pose.position.z = 0.0; traj.points.push_back(point); - point.pose.position.x = 3.0; + point.pose.position.x = 1.8; point.pose.position.y = 0.0; - point.pose.position.z = 0.5; + point.pose.position.z = 0.8; traj.points.push_back(point); size_t closest_idx = 0; EXPECT_DOUBLE_EQ( - std::abs(longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base)), M_PI_4); + longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base), std::atan2(0.8, 0.6)); closest_idx = 1; EXPECT_DOUBLE_EQ( - 
std::abs(longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base)), M_PI_4); + longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base), std::atan2(-0.8, 0.6)); closest_idx = 2; EXPECT_DOUBLE_EQ( - std::abs(longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base)), - std::atan2(0.5, 1)); + longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base), std::atan2(0.8, 0.6)); closest_idx = 3; EXPECT_DOUBLE_EQ( - std::abs(longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base)), - std::atan2(0.5, 1)); + longitudinal_utils::getPitchByTraj(traj, closest_idx, wheel_base), std::atan2(0.8, 0.6)); } TEST(TestLongitudinalControllerUtils, calcPoseAfterTimeDelay) @@ -355,95 +353,113 @@ TEST(TestLongitudinalControllerUtils, lerpTrajectoryPoint) TrajectoryPoint p; p.pose.position.x = 0.0; p.pose.position.y = 0.0; + p.pose.position.z = 0.0; p.longitudinal_velocity_mps = 10.0; p.acceleration_mps2 = 10.0; points.push_back(p); p.pose.position.x = 1.0; p.pose.position.y = 0.0; + p.pose.position.z = 0.0; p.longitudinal_velocity_mps = 20.0; p.acceleration_mps2 = 20.0; points.push_back(p); p.pose.position.x = 1.0; p.pose.position.y = 1.0; + p.pose.position.z = 1.0; p.longitudinal_velocity_mps = 30.0; p.acceleration_mps2 = 30.0; points.push_back(p); p.pose.position.x = 2.0; p.pose.position.y = 1.0; + p.pose.position.z = 2.0; p.longitudinal_velocity_mps = 40.0; p.acceleration_mps2 = 40.0; points.push_back(p); - TrajectoryPoint result; Pose pose; double max_dist = 3.0; double max_yaw = 0.7; // Points on the trajectory gives back the original trajectory points values pose.position.x = 0.0; pose.position.y = 0.0; - result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 10.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 10.0, abs_err); + 
pose.position.z = 0.0; + + auto result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 10.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 10.0, abs_err); pose.position.x = 1.0; pose.position.y = 0.0; + pose.position.z = 0.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 20.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 20.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 20.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 20.0, abs_err); pose.position.x = 1.0; pose.position.y = 1.0; + pose.position.z = 1.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 30.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 30.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 30.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 30.0, abs_err); pose.position.x = 2.0; pose.position.y = 
1.0; + pose.position.z = 2.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 40.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 40.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 40.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 40.0, abs_err); // Interpolate between trajectory points pose.position.x = 0.5; pose.position.y = 0.0; + pose.position.z = 0.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 15.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 15.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 15.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 15.0, abs_err); pose.position.x = 0.75; pose.position.y = 0.0; + pose.position.z = 0.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, pose.position.y, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 17.5, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 17.5, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + 
EXPECT_NEAR(result.first.pose.position.y, pose.position.y, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 17.5, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 17.5, abs_err); // Interpolate away from the trajectory (interpolated point is projected) pose.position.x = 0.5; pose.position.y = -1.0; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, 0.0, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 15.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 15.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, 0.0, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 15.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 15.0, abs_err); // Ambiguous projections: possibility with the lowest index is used pose.position.x = 0.5; pose.position.y = 0.5; result = longitudinal_utils::lerpTrajectoryPoint(points, pose, max_dist, max_yaw); - EXPECT_NEAR(result.pose.position.x, pose.position.x, abs_err); - EXPECT_NEAR(result.pose.position.y, 0.0, abs_err); - EXPECT_NEAR(result.longitudinal_velocity_mps, 15.0, abs_err); - EXPECT_NEAR(result.acceleration_mps2, 15.0, abs_err); + EXPECT_NEAR(result.first.pose.position.x, pose.position.x, abs_err); + EXPECT_NEAR(result.first.pose.position.y, 0.0, abs_err); + EXPECT_NEAR(result.first.pose.position.z, pose.position.z, abs_err); + EXPECT_NEAR(result.first.longitudinal_velocity_mps, 15.0, abs_err); + EXPECT_NEAR(result.first.acceleration_mps2, 15.0, abs_err); } TEST(TestLongitudinalControllerUtils, applyDiffLimitFilter) @@ -486,3 +502,63 @@ TEST(TestLongitudinalControllerUtils, applyDiffLimitFilter) prev = new_val; } } + 
+TEST(TestLongitudinalControllerUtils, findTrajectoryPoseAfterDistance) +{ + using autoware_auto_planning_msgs::msg::Trajectory; + using autoware_auto_planning_msgs::msg::TrajectoryPoint; + using geometry_msgs::msg::Pose; + const double abs_err = 1e-5; + Trajectory traj; + TrajectoryPoint point; + point.pose.position.x = 0.0; + point.pose.position.y = 0.0; + point.pose.position.z = 0.0; + traj.points.push_back(point); + point.pose.position.x = 1.0; + point.pose.position.y = 0.0; + point.pose.position.z = 0.0; + traj.points.push_back(point); + point.pose.position.x = 1.0; + point.pose.position.y = 1.0; + point.pose.position.z = 1.0; + traj.points.push_back(point); + point.pose.position.x = 2.0; + point.pose.position.y = 1.0; + point.pose.position.z = 2.0; + traj.points.push_back(point); + size_t src_idx = 0; + double distance = 0.0; + Pose result = longitudinal_utils::findTrajectoryPoseAfterDistance(src_idx, distance, traj); + EXPECT_NEAR(result.position.x, 0.0, abs_err); + EXPECT_NEAR(result.position.y, 0.0, abs_err); + EXPECT_NEAR(result.position.z, 0.0, abs_err); + + src_idx = 0; + distance = 0.5; + result = longitudinal_utils::findTrajectoryPoseAfterDistance(src_idx, distance, traj); + EXPECT_NEAR(result.position.x, 0.5, abs_err); + EXPECT_NEAR(result.position.y, 0.0, abs_err); + EXPECT_NEAR(result.position.z, 0.0, abs_err); + + src_idx = 0; + distance = 1.0; + result = longitudinal_utils::findTrajectoryPoseAfterDistance(src_idx, distance, traj); + EXPECT_NEAR(result.position.x, 1.0, abs_err); + EXPECT_NEAR(result.position.y, 0.0, abs_err); + EXPECT_NEAR(result.position.z, 0.0, abs_err); + + src_idx = 0; + distance = 1.5; + result = longitudinal_utils::findTrajectoryPoseAfterDistance(src_idx, distance, traj); + EXPECT_NEAR(result.position.x, 1.0, abs_err); + EXPECT_NEAR(result.position.y, 1.0 / (2.0 * sqrt(2.0)), abs_err); + EXPECT_NEAR(result.position.z, 1.0 / (2.0 * sqrt(2.0)), abs_err); + + src_idx = 0; + distance = 20.0; // beyond the trajectory, should 
return the last point + result = longitudinal_utils::findTrajectoryPoseAfterDistance(src_idx, distance, traj); + EXPECT_NEAR(result.position.x, 2.0, abs_err); + EXPECT_NEAR(result.position.y, 1.0, abs_err); + EXPECT_NEAR(result.position.z, 2.0, abs_err); +} diff --git a/control/trajectory_follower_node/CMakeLists.txt b/control/trajectory_follower_node/CMakeLists.txt index c64a51b3dde7f..653b02eb39ed6 100644 --- a/control/trajectory_follower_node/CMakeLists.txt +++ b/control/trajectory_follower_node/CMakeLists.txt @@ -62,4 +62,5 @@ ament_auto_package( param test launch + config ) diff --git a/control/trajectory_follower_node/config/simple_trajectory_follower.param.yaml b/control/trajectory_follower_node/config/simple_trajectory_follower.param.yaml new file mode 100644 index 0000000000000..8194990a136ea --- /dev/null +++ b/control/trajectory_follower_node/config/simple_trajectory_follower.param.yaml @@ -0,0 +1,5 @@ +/**: + ros__parameters: + use_external_target_vel: $(var use_external_target_vel) + external_target_vel: $(var external_target_vel) + lateral_deviation: $(var lateral_deviation) diff --git a/control/trajectory_follower_node/launch/simple_trajectory_follower.launch.xml b/control/trajectory_follower_node/launch/simple_trajectory_follower.launch.xml index 8ce799e17e632..0c4c3faac73a9 100644 --- a/control/trajectory_follower_node/launch/simple_trajectory_follower.launch.xml +++ b/control/trajectory_follower_node/launch/simple_trajectory_follower.launch.xml @@ -5,9 +5,7 @@ - - - + diff --git a/control/vehicle_cmd_gate/config/vehicle_cmd_gate.param.yaml b/control/vehicle_cmd_gate/config/vehicle_cmd_gate.param.yaml index 7ad685217f13d..54c87f45b6a96 100644 --- a/control/vehicle_cmd_gate/config/vehicle_cmd_gate.param.yaml +++ b/control/vehicle_cmd_gate/config/vehicle_cmd_gate.param.yaml @@ -3,7 +3,7 @@ update_rate: 10.0 system_emergency_heartbeat_timeout: 0.5 use_emergency_handling: false - check_external_emergency_heartbeat: false + 
check_external_emergency_heartbeat: $(var check_external_emergency_heartbeat) use_start_request: false enable_cmd_limit_filter: true filter_activated_count_threshold: 5 diff --git a/control/vehicle_cmd_gate/launch/vehicle_cmd_gate.launch.xml b/control/vehicle_cmd_gate/launch/vehicle_cmd_gate.launch.xml index a6cf7cdcf8c08..c5368276b488a 100644 --- a/control/vehicle_cmd_gate/launch/vehicle_cmd_gate.launch.xml +++ b/control/vehicle_cmd_gate/launch/vehicle_cmd_gate.launch.xml @@ -1,8 +1,6 @@ - - @@ -44,10 +42,7 @@ - + - - - diff --git a/control/vehicle_cmd_gate/src/vehicle_cmd_gate.cpp b/control/vehicle_cmd_gate/src/vehicle_cmd_gate.cpp index 1bed4ee67f412..aec2ede53d9e7 100644 --- a/control/vehicle_cmd_gate/src/vehicle_cmd_gate.cpp +++ b/control/vehicle_cmd_gate/src/vehicle_cmd_gate.cpp @@ -430,13 +430,17 @@ void VehicleCmdGate::publishControlCommands(const Commands & commands) // Check engage if (!is_engaged_) { - filtered_commands.control = createStopControlCmd(); + filtered_commands.control.longitudinal = createLongitudinalStopControlCmd(); } // Check pause. Place this check after all other checks as it needs the final output. 
adapi_pause_->update(filtered_commands.control); if (adapi_pause_->is_paused()) { - filtered_commands.control = createStopControlCmd(); + if (is_engaged_) { + filtered_commands.control.longitudinal = createLongitudinalStopControlCmd(); + } else { + filtered_commands.control = createStopControlCmd(); + } } // Check if command filtering option is enable @@ -599,6 +603,17 @@ AckermannControlCommand VehicleCmdGate::createStopControlCmd() const return cmd; } +LongitudinalCommand VehicleCmdGate::createLongitudinalStopControlCmd() const +{ + LongitudinalCommand cmd; + const auto t = this->now(); + cmd.stamp = t; + cmd.speed = 0.0; + cmd.acceleration = stop_hold_acceleration_; + + return cmd; +} + AckermannControlCommand VehicleCmdGate::createEmergencyStopControlCmd() const { AckermannControlCommand cmd; diff --git a/control/vehicle_cmd_gate/src/vehicle_cmd_gate.hpp b/control/vehicle_cmd_gate/src/vehicle_cmd_gate.hpp index c36aab240ae79..b79c58d120443 100644 --- a/control/vehicle_cmd_gate/src/vehicle_cmd_gate.hpp +++ b/control/vehicle_cmd_gate/src/vehicle_cmd_gate.hpp @@ -55,6 +55,7 @@ namespace vehicle_cmd_gate using autoware_adapi_v1_msgs::msg::MrmState; using autoware_adapi_v1_msgs::msg::OperationModeState; using autoware_auto_control_msgs::msg::AckermannControlCommand; +using autoware_auto_control_msgs::msg::LongitudinalCommand; using autoware_auto_vehicle_msgs::msg::GearCommand; using autoware_auto_vehicle_msgs::msg::HazardLightsCommand; using autoware_auto_vehicle_msgs::msg::SteeringReport; @@ -220,6 +221,7 @@ class VehicleCmdGate : public rclcpp::Node // Algorithm AckermannControlCommand prev_control_cmd_; AckermannControlCommand createStopControlCmd() const; + LongitudinalCommand createLongitudinalStopControlCmd() const; AckermannControlCommand createEmergencyStopControlCmd() const; std::shared_ptr prev_time_; diff --git a/control/vehicle_cmd_gate/test/src/test_filter_in_vehicle_cmd_gate_node.cpp 
b/control/vehicle_cmd_gate/test/src/test_filter_in_vehicle_cmd_gate_node.cpp index f02235ed1ecbf..f9f3087a6b69c 100644 --- a/control/vehicle_cmd_gate/test/src/test_filter_in_vehicle_cmd_gate_node.cpp +++ b/control/vehicle_cmd_gate/test/src/test_filter_in_vehicle_cmd_gate_node.cpp @@ -331,6 +331,9 @@ std::shared_ptr generateNode() {"--ros-args", "--params-file", vehicle_cmd_gate_dir + "/config/vehicle_cmd_gate.param.yaml", "--ros-args", "--params-file", vehicle_info_util_dir + "/config/vehicle_info.param.yaml"}); + node_options.append_parameter_override( + "check_external_emergency_heartbeat", true); // This parameter has to be set when launching. + const auto override = [&](const auto s, const std::vector v) { node_options.append_parameter_override>(s, v); }; diff --git a/evaluator/tier4_metrics_rviz_plugin/package.xml b/evaluator/tier4_metrics_rviz_plugin/package.xml index 7bd930f4f3c4c..fe84a0ab81760 100644 --- a/evaluator/tier4_metrics_rviz_plugin/package.xml +++ b/evaluator/tier4_metrics_rviz_plugin/package.xml @@ -13,6 +13,7 @@ autoware_cmake diagnostic_msgs + libqt5-charts-dev libqt5-core libqt5-gui libqt5-widgets diff --git a/launch/tier4_control_launch/launch/control.launch.py b/launch/tier4_control_launch/launch/control.launch.py index 06718a2cf0ffa..b49ff7bde2ed3 100644 --- a/launch/tier4_control_launch/launch/control.launch.py +++ b/launch/tier4_control_launch/launch/control.launch.py @@ -116,23 +116,6 @@ def launch_setup(context, *args, **kwargs): parameters=[nearest_search_param, lane_departure_checker_param, vehicle_info_param], extra_arguments=[{"use_intra_process_comms": LaunchConfiguration("use_intra_process")}], ) - # control validator checker - control_validator_component = ComposableNode( - package="control_validator", - plugin="control_validator::ControlValidator", - name="control_validator", - remappings=[ - ("~/input/kinematics", "/localization/kinematic_state"), - ("~/input/reference_trajectory", "/planning/scenario_planning/trajectory"), - 
( - "~/input/predicted_trajectory", - "/control/trajectory_follower/lateral/predicted_trajectory", - ), - ("~/output/validation_status", "~/validation_status"), - ], - parameters=[control_validator_param], - extra_arguments=[{"use_intra_process_comms": LaunchConfiguration("use_intra_process")}], - ) # shift decider shift_decider_component = ComposableNode( @@ -351,7 +334,6 @@ def launch_setup(context, *args, **kwargs): executable=LaunchConfiguration("container_executable"), composable_node_descriptions=[ controller_component, - control_validator_component, lane_departure_component, shift_decider_component, vehicle_cmd_gate_component, @@ -360,6 +342,23 @@ def launch_setup(context, *args, **kwargs): ], ) + # control validator checker + control_validator_component = ComposableNode( + package="control_validator", + plugin="control_validator::ControlValidator", + name="control_validator", + remappings=[ + ("~/input/kinematics", "/localization/kinematic_state"), + ("~/input/reference_trajectory", "/planning/scenario_planning/trajectory"), + ( + "~/input/predicted_trajectory", + "/control/trajectory_follower/lateral/predicted_trajectory", + ), + ("~/output/validation_status", "~/validation_status"), + ], + parameters=[control_validator_param], + ) + group = GroupAction( [ PushRosNamespace("control"), @@ -372,7 +371,26 @@ def launch_setup(context, *args, **kwargs): ] ) - return [group] + control_validator_group = GroupAction( + [ + PushRosNamespace("control"), + ComposableNodeContainer( + name="control_validator_container", + namespace="", + package="rclcpp_components", + executable=LaunchConfiguration("container_executable"), + composable_node_descriptions=[ + control_validator_component, + ComposableNode( + package="glog_component", + plugin="GlogComponent", + name="glog_validator_component", + ), + ], + ), + ] + ) + return [group, control_validator_group] def generate_launch_description(): diff --git a/launch/tier4_localization_launch/launch/localization.launch.xml 
b/launch/tier4_localization_launch/launch/localization.launch.xml index 9f51f7e6ea7a1..9d3fba05b4a6e 100644 --- a/launch/tier4_localization_launch/launch/localization.launch.xml +++ b/launch/tier4_localization_launch/launch/localization.launch.xml @@ -4,15 +4,18 @@ + + + + + + - - - - + diff --git a/launch/tier4_localization_launch/launch/pose_twist_estimator/ar_tag_based_localizer.launch.xml b/launch/tier4_localization_launch/launch/pose_twist_estimator/ar_tag_based_localizer.launch.xml index 98f5fcab7c91a..225c7ab9d1146 100644 --- a/launch/tier4_localization_launch/launch/pose_twist_estimator/ar_tag_based_localizer.launch.xml +++ b/launch/tier4_localization_launch/launch/pose_twist_estimator/ar_tag_based_localizer.launch.xml @@ -9,7 +9,7 @@ - + diff --git a/launch/tier4_localization_launch/launch/pose_twist_estimator/ndt_scan_matcher.launch.xml b/launch/tier4_localization_launch/launch/pose_twist_estimator/ndt_scan_matcher.launch.xml index bee300c587816..686e6d6cf12bc 100644 --- a/launch/tier4_localization_launch/launch/pose_twist_estimator/ndt_scan_matcher.launch.xml +++ b/launch/tier4_localization_launch/launch/pose_twist_estimator/ndt_scan_matcher.launch.xml @@ -12,7 +12,7 @@ - + diff --git a/launch/tier4_localization_launch/launch/util/util.launch.xml b/launch/tier4_localization_launch/launch/util/util.launch.xml index 4d0f7505d16a6..aeba276308646 100644 --- a/launch/tier4_localization_launch/launch/util/util.launch.xml +++ b/launch/tier4_localization_launch/launch/util/util.launch.xml @@ -10,21 +10,21 @@ - + - + - + diff --git a/launch/tier4_localization_launch/package.xml b/launch/tier4_localization_launch/package.xml index a0a1cc1f6df64..d0046acf294b5 100644 --- a/launch/tier4_localization_launch/package.xml +++ b/launch/tier4_localization_launch/package.xml @@ -11,6 +11,7 @@ NGUYEN Viet Anh Taiki Yamada Shintaro Sakoda + Masahiro Sakamoto Apache License 2.0 Yamato Ando diff --git 
a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml index 50b73709cc902..9c3e815e8abb4 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml @@ -7,7 +7,6 @@ - @@ -56,7 +55,6 @@ - @@ -130,7 +128,6 @@ - @@ -190,7 +187,6 @@ - @@ -219,7 +215,6 @@ - diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml index 2232feb6d7c67..ddcbbbaeae107 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml @@ -4,7 +4,6 @@ - @@ -38,7 +37,7 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml index 34f0a01d0a688..f9b65fd5c30eb 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml @@ -21,7 +21,7 @@ - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml index cec0c3bc05aac..0fabd5e98e45f 100644 --- 
a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml @@ -3,7 +3,6 @@ - @@ -12,7 +11,6 @@ - @@ -27,7 +25,7 @@ - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml index 6ccff44933966..0ecc89c8743e8 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml @@ -45,6 +45,7 @@ + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py index 1dcb9cfc90daf..a5af24595c593 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py @@ -20,7 +20,6 @@ from launch.conditions import IfCondition from launch.conditions import UnlessCondition from launch.substitutions import LaunchConfiguration -from launch_ros.actions import ComposableNodeContainer from launch_ros.actions import LoadComposableNodes from launch_ros.descriptions import ComposableNode import yaml @@ -140,21 +139,11 @@ def launch_setup(context, *args, **kwargs): pipeline = PointcloudMapFilterPipeline(context) components = [] components.extend(pipeline.create_pipeline()) - individual_container = ComposableNodeContainer( - name=LaunchConfiguration("individual_container_name"), - namespace="", - package="rclcpp_components", - executable=LaunchConfiguration("container_executable"), - composable_node_descriptions=components, - 
condition=UnlessCondition(LaunchConfiguration("use_pointcloud_container")), - output="screen", - ) pointcloud_container_loader = LoadComposableNodes( composable_node_descriptions=components, target_container=LaunchConfiguration("pointcloud_container_name"), - condition=IfCondition(LaunchConfiguration("use_pointcloud_container")), ) - return [individual_container, pointcloud_container_loader] + return [pointcloud_container_loader] def generate_launch_description(): @@ -167,9 +156,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("output_topic", "") add_launch_arg("use_multithread", "False") add_launch_arg("use_intra_process", "True") - add_launch_arg("use_pointcloud_container", "False") add_launch_arg("pointcloud_container_name", "pointcloud_container") - add_launch_arg("individual_container_name", "pointcloud_map_filter_container") add_launch_arg("use_pointcloud_map", "true") set_container_executable = SetLaunchConfiguration( "container_executable", diff --git a/launch/tier4_perception_launch/launch/object_recognition/tracking/tracking.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/tracking/tracking.launch.xml index 85c6e87c0fd19..fed268084792a 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/tracking/tracking.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/tracking/tracking.launch.xml @@ -36,6 +36,7 @@ + diff --git a/launch/tier4_perception_launch/launch/obstacle_segmentation/ground_segmentation/ground_segmentation.launch.py b/launch/tier4_perception_launch/launch/obstacle_segmentation/ground_segmentation/ground_segmentation.launch.py index 66a0bb4d0fb63..99074d4ae4a98 100644 --- a/launch/tier4_perception_launch/launch/obstacle_segmentation/ground_segmentation/ground_segmentation.launch.py +++ b/launch/tier4_perception_launch/launch/obstacle_segmentation/ground_segmentation/ground_segmentation.launch.py @@ -21,7 +21,6 @@ from launch.conditions import UnlessCondition from 
launch.substitutions import LaunchConfiguration from launch.substitutions import PathJoinSubstitution -from launch_ros.actions import ComposableNodeContainer from launch_ros.actions import LoadComposableNodes from launch_ros.descriptions import ComposableNode from launch_ros.substitutions import FindPackageShare @@ -504,21 +503,11 @@ def launch_setup(context, *args, **kwargs): output_topic=pipeline.output_topic, ) ) - individual_container = ComposableNodeContainer( - name=LaunchConfiguration("individual_container_name"), - namespace="", - package="rclcpp_components", - executable=LaunchConfiguration("container_executable"), - composable_node_descriptions=components, - condition=UnlessCondition(LaunchConfiguration("use_pointcloud_container")), - output="screen", - ) pointcloud_container_loader = LoadComposableNodes( composable_node_descriptions=components, target_container=LaunchConfiguration("pointcloud_container_name"), - condition=IfCondition(LaunchConfiguration("use_pointcloud_container")), ) - return [individual_container, pointcloud_container_loader] + return [pointcloud_container_loader] def generate_launch_description(): @@ -530,9 +519,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("base_frame", "base_link") add_launch_arg("use_multithread", "False") add_launch_arg("use_intra_process", "True") - add_launch_arg("use_pointcloud_container", "False") add_launch_arg("pointcloud_container_name", "pointcloud_container") - add_launch_arg("individual_container_name", "ground_segmentation_container") add_launch_arg("input/pointcloud", "/sensing/lidar/concatenated/pointcloud") set_container_executable = SetLaunchConfiguration( diff --git a/launch/tier4_perception_launch/launch/occupancy_grid_map/probabilistic_occupancy_grid_map.launch.xml b/launch/tier4_perception_launch/launch/occupancy_grid_map/probabilistic_occupancy_grid_map.launch.xml index 244e0940acb70..fce3da96ce888 100644 --- 
a/launch/tier4_perception_launch/launch/occupancy_grid_map/probabilistic_occupancy_grid_map.launch.xml +++ b/launch/tier4_perception_launch/launch/occupancy_grid_map/probabilistic_occupancy_grid_map.launch.xml @@ -6,7 +6,6 @@ - @@ -23,7 +22,7 @@ - + @@ -39,7 +38,7 @@ - + @@ -55,7 +54,7 @@ - + diff --git a/launch/tier4_perception_launch/launch/perception.launch.xml b/launch/tier4_perception_launch/launch/perception.launch.xml index 0d517ced97d17..dbb74335f79d2 100644 --- a/launch/tier4_perception_launch/launch/perception.launch.xml +++ b/launch/tier4_perception_launch/launch/perception.launch.xml @@ -82,7 +82,6 @@ default="false" description="if use_empty_dynamic_object_publisher:=true, /perception/object_recognition/objects topic has an empty DynamicObjectArray" /> - @@ -131,7 +130,6 @@ - @@ -146,7 +144,6 @@ - @@ -197,7 +194,6 @@ - diff --git a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml index 7b81a42fdb79f..aa47d094d824e 100644 --- a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml +++ b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light.launch.xml @@ -24,12 +24,9 @@ - - - - - - + + + @@ -49,10 +46,11 @@ - - + + + @@ -62,12 +60,9 @@ - - - - - - + + + @@ -106,10 +101,11 @@ - - + + + @@ -119,12 +115,9 @@ - - - - - - + + + diff --git a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py index dc5a2443d3f45..e467aba5d4e98 100644 --- a/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py +++ b/launch/tier4_perception_launch/launch/traffic_light_recognition/traffic_light_node_container.launch.py @@ -17,6 +17,7 @@ from ament_index_python.packages import 
get_package_share_directory from launch import LaunchDescription from launch.actions import DeclareLaunchArgument +from launch.actions import OpaqueFunction from launch.actions import SetLaunchConfiguration from launch.conditions import IfCondition from launch.conditions import UnlessCondition @@ -24,83 +25,29 @@ from launch_ros.actions import ComposableNodeContainer from launch_ros.actions import LoadComposableNodes from launch_ros.descriptions import ComposableNode +from launch_ros.parameter_descriptions import ParameterFile -def generate_launch_description(): - launch_arguments = [] - - def add_launch_arg(name: str, default_value=None, description=None): - # a default_value of None is equivalent to not passing that kwarg at all - launch_arguments.append( - DeclareLaunchArgument(name, default_value=default_value, description=description) - ) - - fine_detector_share_dir = get_package_share_directory("traffic_light_fine_detector") - classifier_share_dir = get_package_share_directory("traffic_light_classifier") - add_launch_arg("enable_image_decompressor", "True") - add_launch_arg("enable_fine_detection", "True") - add_launch_arg("input/image", "/sensing/camera/traffic_light/image_raw") - add_launch_arg("output/rois", "/perception/traffic_light_recognition/rois") - add_launch_arg( - "output/traffic_signals", - "/perception/traffic_light_recognition/traffic_signals", - ) - add_launch_arg( - "output/car/traffic_signals", "/perception/traffic_light_recognition/car/traffic_signals" - ) - add_launch_arg( - "output/pedestrian/traffic_signals", - "/perception/traffic_light_recognition/pedestrian/traffic_signals", - ) - - # traffic_light_fine_detector - add_launch_arg( - "fine_detector_model_path", - os.path.join(fine_detector_share_dir, "data", "tlr_yolox_s.onnx"), - ) - add_launch_arg( - "fine_detector_label_path", - os.path.join(fine_detector_share_dir, "data", "tlr_labels.txt"), - ) - add_launch_arg("fine_detector_precision", "fp16") - 
add_launch_arg("fine_detector_score_thresh", "0.3") - add_launch_arg("fine_detector_nms_thresh", "0.65") - - add_launch_arg("approximate_sync", "False") - - # traffic_light_classifier - add_launch_arg("classifier_type", "1") - add_launch_arg( - "car_classifier_model_path", - os.path.join(classifier_share_dir, "data", "traffic_light_classifier_efficientNet_b1.onnx"), - ) - add_launch_arg( - "pedestrian_classifier_model_path", - os.path.join( - classifier_share_dir, "data", "pedestrian_traffic_light_classifier_efficientNet_b1.onnx" - ), - ) - add_launch_arg( - "car_classifier_label_path", os.path.join(classifier_share_dir, "data", "lamp_labels.txt") - ) - add_launch_arg( - "pedestrian_classifier_label_path", - os.path.join(classifier_share_dir, "data", "lamp_labels_ped.txt"), - ) - add_launch_arg("classifier_precision", "fp16") - add_launch_arg("classifier_mean", "[123.675, 116.28, 103.53]") - add_launch_arg("classifier_std", "[58.395, 57.12, 57.375]") - add_launch_arg("backlight_threshold", "0.85") - - add_launch_arg("use_intra_process", "False") - add_launch_arg("use_multithread", "False") - +def launch_setup(context, *args, **kwargs): def create_parameter_dict(*args): result = {} for x in args: result[x] = LaunchConfiguration(x) return result + fine_detector_model_param = ParameterFile( + param_file=LaunchConfiguration("fine_detector_param_path").perform(context), + allow_substs=True, + ) + car_traffic_light_classifier_model_param = ParameterFile( + param_file=LaunchConfiguration("car_classifier_param_path").perform(context), + allow_substs=True, + ) + pedestrian_traffic_light_classifier_model_param = ParameterFile( + param_file=LaunchConfiguration("pedestrian_classifier_param_path").perform(context), + allow_substs=True, + ) + container = ComposableNodeContainer( name="traffic_light_node_container", namespace="", @@ -112,19 +59,7 @@ def create_parameter_dict(*args): plugin="traffic_light::TrafficLightClassifierNodelet", name="car_traffic_light_classifier", 
namespace="classification", - parameters=[ - { - "approximate_sync": LaunchConfiguration("approximate_sync"), - "classifier_type": LaunchConfiguration("classifier_type"), - "classify_traffic_light_type": 0, - "classifier_model_path": LaunchConfiguration("car_classifier_model_path"), - "classifier_label_path": LaunchConfiguration("car_classifier_label_path"), - "classifier_precision": LaunchConfiguration("classifier_precision"), - "classifier_mean": LaunchConfiguration("classifier_mean"), - "classifier_std": LaunchConfiguration("classifier_std"), - "backlight_threshold": LaunchConfiguration("backlight_threshold"), - } - ], + parameters=[car_traffic_light_classifier_model_param], remappings=[ ("~/input/image", LaunchConfiguration("input/image")), ("~/input/rois", LaunchConfiguration("output/rois")), @@ -139,23 +74,7 @@ def create_parameter_dict(*args): plugin="traffic_light::TrafficLightClassifierNodelet", name="pedestrian_traffic_light_classifier", namespace="classification", - parameters=[ - { - "approximate_sync": LaunchConfiguration("approximate_sync"), - "classifier_type": LaunchConfiguration("classifier_type"), - "classify_traffic_light_type": 1, - "classifier_model_path": LaunchConfiguration( - "pedestrian_classifier_model_path" - ), - "classifier_label_path": LaunchConfiguration( - "pedestrian_classifier_label_path" - ), - "classifier_precision": LaunchConfiguration("classifier_precision"), - "classifier_mean": LaunchConfiguration("classifier_mean"), - "classifier_std": LaunchConfiguration("classifier_std"), - "backlight_threshold": LaunchConfiguration("backlight_threshold"), - } - ], + parameters=[pedestrian_traffic_light_classifier_model_param], remappings=[ ("~/input/image", LaunchConfiguration("input/image")), ("~/input/rois", LaunchConfiguration("output/rois")), @@ -214,14 +133,6 @@ def create_parameter_dict(*args): condition=IfCondition(LaunchConfiguration("enable_image_decompressor")), ) - fine_detector_param = create_parameter_dict( - 
"fine_detector_model_path", - "fine_detector_label_path", - "fine_detector_precision", - "fine_detector_score_thresh", - "fine_detector_nms_thresh", - ) - fine_detector_loader = LoadComposableNodes( composable_node_descriptions=[ ComposableNode( @@ -229,7 +140,7 @@ def create_parameter_dict(*args): plugin="traffic_light::TrafficLightFineDetectorNodelet", name="traffic_light_fine_detector", namespace="detection", - parameters=[fine_detector_param], + parameters=[fine_detector_model_param], remappings=[ ("~/input/image", LaunchConfiguration("input/image")), ("~/input/rois", "rough/rois"), @@ -245,6 +156,61 @@ def create_parameter_dict(*args): condition=IfCondition(LaunchConfiguration("enable_fine_detection")), ) + return [container, decompressor_loader, fine_detector_loader] + + +def generate_launch_description(): + launch_arguments = [] + + def add_launch_arg(name: str, default_value=None, description=None): + # a default_value of None is equivalent to not passing that kwarg at all + launch_arguments.append( + DeclareLaunchArgument(name, default_value=default_value, description=description) + ) + + fine_detector_share_dir = get_package_share_directory("traffic_light_fine_detector") + classifier_share_dir = get_package_share_directory("traffic_light_classifier") + add_launch_arg("enable_image_decompressor", "True") + add_launch_arg("enable_fine_detection", "True") + add_launch_arg("input/image", "/sensing/camera/traffic_light/image_raw") + add_launch_arg("output/rois", "/perception/traffic_light_recognition/rois") + add_launch_arg( + "output/traffic_signals", + "/perception/traffic_light_recognition/traffic_signals", + ) + add_launch_arg( + "output/car/traffic_signals", "/perception/traffic_light_recognition/car/traffic_signals" + ) + add_launch_arg( + "output/pedestrian/traffic_signals", + "/perception/traffic_light_recognition/pedestrian/traffic_signals", + ) + + # traffic_light_fine_detector + add_launch_arg( + "fine_detector_param_path", + 
os.path.join(fine_detector_share_dir, "config", "traffic_light_fine_detector.param.yaml"), + ) + + # traffic_light_classifier + add_launch_arg( + "car_classifier_param_path", + os.path.join( + classifier_share_dir, "config", "car_traffic_light_classifier_efficientNet.param.yaml" + ), + ) + add_launch_arg( + "pedestrian_classifier_param_path", + os.path.join( + classifier_share_dir, + "config", + "pedestrian_traffic_light_classifier_efficientNet.param.yaml", + ), + ) + + add_launch_arg("use_intra_process", "False") + add_launch_arg("use_multithread", "False") + set_container_executable = SetLaunchConfiguration( "container_executable", "component_container", @@ -262,8 +228,6 @@ def create_parameter_dict(*args): *launch_arguments, set_container_executable, set_container_mt_executable, - container, - decompressor_loader, - fine_detector_loader, + OpaqueFunction(function=launch_setup), ] ) diff --git a/launch/tier4_perception_launch/package.xml b/launch/tier4_perception_launch/package.xml index 7e6ba83747a8b..2d007ae8c5ff0 100644 --- a/launch/tier4_perception_launch/package.xml +++ b/launch/tier4_perception_launch/package.xml @@ -37,7 +37,6 @@ radar_object_clustering radar_object_tracker shape_estimation - tensorrt_yolo topic_tools tracking_object_merger traffic_light_arbiter diff --git a/launch/tier4_planning_launch/launch/scenario_planning/lane_driving/behavior_planning/behavior_planning.launch.xml b/launch/tier4_planning_launch/launch/scenario_planning/lane_driving/behavior_planning/behavior_planning.launch.xml index 602bd25ad8d27..90ebd23215be5 100644 --- a/launch/tier4_planning_launch/launch/scenario_planning/lane_driving/behavior_planning/behavior_planning.launch.xml +++ b/launch/tier4_planning_launch/launch/scenario_planning/lane_driving/behavior_planning/behavior_planning.launch.xml @@ -269,59 +269,28 @@ - - - - - - - - - - - - - - + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + diff --git 
a/launch/tier4_sensing_launch/launch/sensing.launch.xml b/launch/tier4_sensing_launch/launch/sensing.launch.xml index af9e4481a1430..391a1f8bad641 100644 --- a/launch/tier4_sensing_launch/launch/sensing.launch.xml +++ b/launch/tier4_sensing_launch/launch/sensing.launch.xml @@ -2,7 +2,6 @@ - @@ -14,7 +13,6 @@ - diff --git a/localization/ndt_scan_matcher/include/ndt_scan_matcher/map_update_module.hpp b/localization/ndt_scan_matcher/include/ndt_scan_matcher/map_update_module.hpp index 157421fc3ccb1..38b5a932ae91d 100644 --- a/localization/ndt_scan_matcher/include/ndt_scan_matcher/map_update_module.hpp +++ b/localization/ndt_scan_matcher/include/ndt_scan_matcher/map_update_module.hpp @@ -36,29 +36,28 @@ #include #include #include +#include #include class MapUpdateModule { using PointSource = pcl::PointXYZ; using PointTarget = pcl::PointXYZ; - using NormalDistributionsTransform = - pclomp::MultiGridNormalDistributionsTransform; + using NdtType = pclomp::MultiGridNormalDistributionsTransform; + using NdtPtrType = std::shared_ptr; public: MapUpdateModule( - rclcpp::Node * node, std::mutex * ndt_ptr_mutex, - std::shared_ptr ndt_ptr, + rclcpp::Node * node, std::mutex * ndt_ptr_mutex, NdtPtrType & ndt_ptr, HyperParameters::DynamicMapLoading param); private: friend class NDTScanMatcher; - void update_ndt( - const std::vector & maps_to_add, - const std::vector & map_ids_to_remove); + // Update the specified NDT + void update_ndt(const geometry_msgs::msg::Point & position, NdtType & ndt); void update_map(const geometry_msgs::msg::Point & position); - [[nodiscard]] bool should_update_map(const geometry_msgs::msg::Point & position) const; + [[nodiscard]] bool should_update_map(const geometry_msgs::msg::Point & position); void publish_partial_pcd_map(); rclcpp::Publisher::SharedPtr loaded_pcd_pub_; @@ -66,7 +65,7 @@ class MapUpdateModule rclcpp::Client::SharedPtr pcd_loader_client_; - std::shared_ptr ndt_ptr_; + NdtPtrType & ndt_ptr_; std::mutex * ndt_ptr_mutex_; rclcpp::Logger 
logger_; rclcpp::Clock::SharedPtr clock_; @@ -74,6 +73,10 @@ class MapUpdateModule std::optional last_update_position_ = std::nullopt; HyperParameters::DynamicMapLoading param_; + + // Indicate if there is a prefetch thread waiting for being collected + NdtPtrType secondary_ndt_ptr_; + bool need_rebuild_; }; #endif // NDT_SCAN_MATCHER__MAP_UPDATE_MODULE_HPP_ diff --git a/localization/ndt_scan_matcher/src/map_update_module.cpp b/localization/ndt_scan_matcher/src/map_update_module.cpp index 402ec9da32782..17bafcfedfe80 100644 --- a/localization/ndt_scan_matcher/src/map_update_module.cpp +++ b/localization/ndt_scan_matcher/src/map_update_module.cpp @@ -15,9 +15,9 @@ #include "ndt_scan_matcher/map_update_module.hpp" MapUpdateModule::MapUpdateModule( - rclcpp::Node * node, std::mutex * ndt_ptr_mutex, - std::shared_ptr ndt_ptr, HyperParameters::DynamicMapLoading param) -: ndt_ptr_(std::move(ndt_ptr)), + rclcpp::Node * node, std::mutex * ndt_ptr_mutex, NdtPtrType & ndt_ptr, + HyperParameters::DynamicMapLoading param) +: ndt_ptr_(ndt_ptr), ndt_ptr_mutex_(ndt_ptr_mutex), logger_(node->get_logger()), clock_(node->get_clock()), @@ -28,29 +28,90 @@ MapUpdateModule::MapUpdateModule( pcd_loader_client_ = node->create_client("pcd_loader_service"); + + secondary_ndt_ptr_.reset(new NdtType); + + if (ndt_ptr_) { + *secondary_ndt_ptr_ = *ndt_ptr_; + } else { + RCLCPP_ERROR_STREAM_THROTTLE(logger_, *clock_, 1000, "Attempt to update a null NDT pointer."); + } + + // Initially, a direct map update on ndt_ptr_ is needed. + // ndt_ptr_'s mutex is locked until it is fully rebuilt. + // From the second update, the update is done on secondary_ndt_ptr_, + // and ndt_ptr_ is only locked when swapping its pointer with + // secondary_ndt_ptr_. 
+ need_rebuild_ = true; } -bool MapUpdateModule::should_update_map(const geometry_msgs::msg::Point & position) const +bool MapUpdateModule::should_update_map(const geometry_msgs::msg::Point & position) { if (last_update_position_ == std::nullopt) { return false; } + const double dx = position.x - last_update_position_.value().x; const double dy = position.y - last_update_position_.value().y; const double distance = std::hypot(dx, dy); if (distance + param_.lidar_radius > param_.map_radius) { RCLCPP_ERROR_STREAM_THROTTLE(logger_, *clock_, 1000, "Dynamic map loading is not keeping up."); + // If the map does not keep up with the current position, + // lock ndt_ptr_ entirely until it is fully rebuilt. + need_rebuild_ = true; } + return distance > param_.update_distance; } void MapUpdateModule::update_map(const geometry_msgs::msg::Point & position) +{ + // If the current position is super far from the previous loading position, + // lock and rebuild ndt_ptr_ + if (need_rebuild_) { + ndt_ptr_mutex_->lock(); + auto param = ndt_ptr_->getParams(); + + ndt_ptr_.reset(new NdtType); + + ndt_ptr_->setParams(param); + + update_ndt(position, *ndt_ptr_); + ndt_ptr_mutex_->unlock(); + need_rebuild_ = false; + } else { + // Load map to the secondary_ndt_ptr, which does not require a mutex lock + // Since the update of the secondary ndt ptr and the NDT align (done on + // the main ndt_ptr_) overlap, the latency of updating/alignment reduces partly. + // If the updating is done the main ndt_ptr_, either the update or the NDT + // align will be blocked by the other. 
+ update_ndt(position, *secondary_ndt_ptr_); + + ndt_ptr_mutex_->lock(); + auto input_source = ndt_ptr_->getInputSource(); + ndt_ptr_ = secondary_ndt_ptr_; + ndt_ptr_->setInputSource(input_source); + ndt_ptr_mutex_->unlock(); + } + + secondary_ndt_ptr_.reset(new NdtType); + *secondary_ndt_ptr_ = *ndt_ptr_; + + // Memorize the position of the last update + last_update_position_ = position; + + // Publish the new ndt maps + publish_partial_pcd_map(); +} + +void MapUpdateModule::update_ndt(const geometry_msgs::msg::Point & position, NdtType & ndt) { auto request = std::make_shared(); + request->area.center_x = static_cast(position.x); request->area.center_y = static_cast(position.y); request->area.radius = static_cast(param_.map_radius); - request->cached_ids = ndt_ptr_->getCurrentMapIDs(); + request->cached_ids = ndt.getCurrentMapIDs(); while (!pcd_loader_client_->wait_for_service(std::chrono::seconds(1)) && rclcpp::ok()) { RCLCPP_INFO(logger_, "Waiting for pcd loader service. Check the pointcloud_map_loader."); @@ -69,60 +130,49 @@ void MapUpdateModule::update_map(const geometry_msgs::msg::Point & position) } status = result.wait_for(std::chrono::seconds(1)); } - update_ndt(result.get()->new_pointcloud_with_ids, result.get()->ids_to_remove); - last_update_position_ = position; -} -void MapUpdateModule::update_ndt( - const std::vector & maps_to_add, - const std::vector & map_ids_to_remove) -{ + auto & maps_to_add = result.get()->new_pointcloud_with_ids; + auto & map_ids_to_remove = result.get()->ids_to_remove; + RCLCPP_INFO( logger_, "Update map (Add: %lu, Remove: %lu)", maps_to_add.size(), map_ids_to_remove.size()); if (maps_to_add.empty() && map_ids_to_remove.empty()) { RCLCPP_INFO(logger_, "Skip map update"); return; } - const auto exe_start_time = std::chrono::system_clock::now(); + const auto exe_start_time = std::chrono::system_clock::now(); const size_t add_size = maps_to_add.size(); - // Perform heavy processing outside of the lock scope std::vector>> 
points_pcl(add_size); + for (size_t i = 0; i < add_size; i++) { points_pcl[i] = pcl::make_shared>(); pcl::fromROSMsg(maps_to_add[i].pointcloud, *points_pcl[i]); } - (*ndt_ptr_mutex_).lock(); - // Add pcd for (size_t i = 0; i < add_size; i++) { - ndt_ptr_->addTarget(points_pcl[i], maps_to_add[i].cell_id); + ndt.addTarget(points_pcl[i], maps_to_add[i].cell_id); } // Remove pcd for (const std::string & map_id_to_remove : map_ids_to_remove) { - ndt_ptr_->removeTarget(map_id_to_remove); + ndt.removeTarget(map_id_to_remove); } - ndt_ptr_->createVoxelKdtree(); - - (*ndt_ptr_mutex_).unlock(); + ndt.createVoxelKdtree(); const auto exe_end_time = std::chrono::system_clock::now(); const auto duration_micro_sec = std::chrono::duration_cast(exe_end_time - exe_start_time).count(); const auto exe_time = static_cast(duration_micro_sec) / 1000.0; RCLCPP_INFO(logger_, "Time duration for creating new ndt_ptr: %lf [ms]", exe_time); - - publish_partial_pcd_map(); } void MapUpdateModule::publish_partial_pcd_map() { pcl::PointCloud map_pcl = ndt_ptr_->getVoxelPCD(); - sensor_msgs::msg::PointCloud2 map_msg; pcl::toROSMsg(map_pcl, map_msg); map_msg.header.frame_id = "map"; diff --git a/localization/pose_initializer/launch/pose_initializer.launch.xml b/localization/pose_initializer/launch/pose_initializer.launch.xml index a0fdde13c65c5..3e4911e2c2936 100644 --- a/localization/pose_initializer/launch/pose_initializer.launch.xml +++ b/localization/pose_initializer/launch/pose_initializer.launch.xml @@ -7,6 +7,7 @@ + @@ -17,8 +18,10 @@ - + + + diff --git a/localization/pose_instability_detector/config/pose_instability_detector.param.yaml b/localization/pose_instability_detector/config/pose_instability_detector.param.yaml index 29a25849d6b1c..d94de020a4a12 100644 --- a/localization/pose_instability_detector/config/pose_instability_detector.param.yaml +++ b/localization/pose_instability_detector/config/pose_instability_detector.param.yaml @@ -1,6 +1,6 @@ /**: ros__parameters: - interval_sec: 1.0 
# [sec] + interval_sec: 0.5 # [sec] threshold_diff_position_x: 1.0 # [m] threshold_diff_position_y: 1.0 # [m] threshold_diff_position_z: 1.0 # [m] diff --git a/localization/pose_instability_detector/schema/pose_instability_detector.schema.json b/localization/pose_instability_detector/schema/pose_instability_detector.schema.json index b45b43102444d..560d39a2d5bff 100644 --- a/localization/pose_instability_detector/schema/pose_instability_detector.schema.json +++ b/localization/pose_instability_detector/schema/pose_instability_detector.schema.json @@ -8,7 +8,7 @@ "properties": { "interval_sec": { "type": "number", - "default": 1.0, + "default": 0.5, "exclusiveMinimum": 0, "description": "The interval of timer_callback in seconds." }, diff --git a/map/map_height_fitter/CMakeLists.txt b/map/map_height_fitter/CMakeLists.txt index 2f123d40c417e..8821158c54757 100644 --- a/map/map_height_fitter/CMakeLists.txt +++ b/map/map_height_fitter/CMakeLists.txt @@ -10,6 +10,10 @@ ament_auto_add_library(map_height_fitter SHARED ) target_link_libraries(map_height_fitter ${PCL_LIBRARIES}) +# When adding `lanelet2_extension` to package.xml, many warnings are generated. +# These are treated as errors in compile, so pedantic warnings are disabled for this package. 
+target_compile_options(map_height_fitter PRIVATE -Wno-pedantic) + ament_auto_add_executable(node src/node.cpp ) diff --git a/map/map_height_fitter/config/map_height_fitter.param.yaml b/map/map_height_fitter/config/map_height_fitter.param.yaml index 45d51a7efb83f..af6a7fddfa554 100644 --- a/map/map_height_fitter/config/map_height_fitter.param.yaml +++ b/map/map_height_fitter/config/map_height_fitter.param.yaml @@ -1,3 +1,5 @@ /**: ros__parameters: - map_loader_name: "/map/pointcloud_map_loader" + map_height_fitter: + map_loader_name: "/map/pointcloud_map_loader" + target: "pointcloud_map" diff --git a/map/map_height_fitter/launch/map_height_fitter.launch.xml b/map/map_height_fitter/launch/map_height_fitter.launch.xml index 955bb66cf6e8d..353f2151ee172 100644 --- a/map/map_height_fitter/launch/map_height_fitter.launch.xml +++ b/map/map_height_fitter/launch/map_height_fitter.launch.xml @@ -7,6 +7,7 @@ + diff --git a/map/map_height_fitter/package.xml b/map/map_height_fitter/package.xml index 85258a8e54b22..7f384aa43ec7b 100644 --- a/map/map_height_fitter/package.xml +++ b/map/map_height_fitter/package.xml @@ -18,8 +18,10 @@ ament_cmake autoware_cmake + autoware_auto_mapping_msgs autoware_map_msgs geometry_msgs + lanelet2_extension libpcl-common pcl_conversions rclcpp diff --git a/map/map_height_fitter/schema/map_height_fitter.schema.json b/map/map_height_fitter/schema/map_height_fitter.schema.json index 69798d92d2fe6..dc59eb76d9685 100644 --- a/map/map_height_fitter/schema/map_height_fitter.schema.json +++ b/map/map_height_fitter/schema/map_height_fitter.schema.json @@ -10,9 +10,14 @@ "type": "string", "description": "Node name of the map loader from which this map_height_fitter will retrieve its parameters", "default": "/map/pointcloud_map_loader" + }, + "target": { + "type": "string", + "description": "Target map to fit (choose from 'pointcloud_map', 'vector_map')", + "default": "pointcloud_map" } }, - "required": ["map_loader_name"], + "required": 
["map_loader_name", "target"], "additionalProperties": false } }, @@ -21,7 +26,12 @@ "type": "object", "properties": { "ros__parameters": { - "$ref": "#/definitions/map_height_fitter" + "type": "object", + "properties": { + "map_height_fitter": { + "$ref": "#/definitions/map_height_fitter" + } + } } }, "required": ["ros__parameters"], diff --git a/map/map_height_fitter/src/map_height_fitter.cpp b/map/map_height_fitter/src/map_height_fitter.cpp index a05c63d44ebce..095574125d9a0 100644 --- a/map/map_height_fitter/src/map_height_fitter.cpp +++ b/map/map_height_fitter/src/map_height_fitter.cpp @@ -14,10 +14,16 @@ #include "map_height_fitter/map_height_fitter.hpp" +#include +#include + +#include #include #include #include +#include +#include #include #include #include @@ -31,52 +37,74 @@ struct MapHeightFitter::Impl static constexpr char enable_partial_load[] = "enable_partial_load"; explicit Impl(rclcpp::Node * node); - void on_map(const sensor_msgs::msg::PointCloud2::ConstSharedPtr msg); + void on_pcd_map(const sensor_msgs::msg::PointCloud2::ConstSharedPtr msg); + void on_vector_map(const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr msg); bool get_partial_point_cloud_map(const Point & point); - double get_ground_height(const tf2::Vector3 & point) const; + double get_ground_height(const Point & point) const; std::optional fit(const Point & position, const std::string & frame); tf2::BufferCore tf2_buffer_; tf2_ros::TransformListener tf2_listener_; std::string map_frame_; - pcl::PointCloud::Ptr map_cloud_; rclcpp::Node * node_; + std::string fit_target_; + + // for fitting by pointcloud_map_loader rclcpp::CallbackGroup::SharedPtr group_; - rclcpp::Client::SharedPtr cli_map_; - rclcpp::Subscription::SharedPtr sub_map_; - rclcpp::AsyncParametersClient::SharedPtr params_map_loader_; + pcl::PointCloud::Ptr map_cloud_; + rclcpp::Client::SharedPtr cli_pcd_map_; + rclcpp::Subscription::SharedPtr sub_pcd_map_; + rclcpp::AsyncParametersClient::SharedPtr 
params_pcd_map_loader_; + + // for fitting by vector_map_loader + lanelet::LaneletMapPtr vector_map_; + rclcpp::Subscription::SharedPtr sub_vector_map_; }; MapHeightFitter::Impl::Impl(rclcpp::Node * node) : tf2_listener_(tf2_buffer_), node_(node) { - const auto callback = [this](const std::shared_future> & future) { - bool partial_load = false; - for (const auto & param : future.get()) { - if (param.get_name() == enable_partial_load) { - partial_load = param.as_bool(); - } - } - - if (partial_load) { - group_ = node_->create_callback_group(rclcpp::CallbackGroupType::MutuallyExclusive); - cli_map_ = node_->create_client( - "~/partial_map_load", rmw_qos_profile_default, group_); - } else { - const auto durable_qos = rclcpp::QoS(1).transient_local(); - sub_map_ = node_->create_subscription( - "~/pointcloud_map", durable_qos, - std::bind(&MapHeightFitter::Impl::on_map, this, std::placeholders::_1)); - } - }; - - const auto map_loader_name = node->declare_parameter("map_loader_name"); - params_map_loader_ = rclcpp::AsyncParametersClient::make_shared(node, map_loader_name); - params_map_loader_->wait_for_service(); - params_map_loader_->get_parameters({enable_partial_load}, callback); + fit_target_ = node->declare_parameter("map_height_fitter.target"); + if (fit_target_ == "pointcloud_map") { + const auto callback = + [this](const std::shared_future> & future) { + bool partial_load = false; + for (const auto & param : future.get()) { + if (param.get_name() == enable_partial_load) { + partial_load = param.as_bool(); + } + } + + if (partial_load) { + group_ = node_->create_callback_group(rclcpp::CallbackGroupType::MutuallyExclusive); + cli_pcd_map_ = node_->create_client( + "~/partial_map_load", rmw_qos_profile_default, group_); + } else { + const auto durable_qos = rclcpp::QoS(1).transient_local(); + sub_pcd_map_ = node_->create_subscription( + "~/pointcloud_map", durable_qos, + std::bind(&MapHeightFitter::Impl::on_pcd_map, this, std::placeholders::_1)); + } + }; + + 
const auto map_loader_name = + node->declare_parameter("map_height_fitter.map_loader_name"); + params_pcd_map_loader_ = rclcpp::AsyncParametersClient::make_shared(node, map_loader_name); + params_pcd_map_loader_->wait_for_service(); + params_pcd_map_loader_->get_parameters({enable_partial_load}, callback); + + } else if (fit_target_ == "vector_map") { + const auto durable_qos = rclcpp::QoS(1).transient_local(); + sub_vector_map_ = node_->create_subscription( + "~/vector_map", durable_qos, + std::bind(&MapHeightFitter::Impl::on_vector_map, this, std::placeholders::_1)); + + } else { + throw std::runtime_error("invalid fit_target"); + } } -void MapHeightFitter::Impl::on_map(const sensor_msgs::msg::PointCloud2::ConstSharedPtr msg) +void MapHeightFitter::Impl::on_pcd_map(const sensor_msgs::msg::PointCloud2::ConstSharedPtr msg) { map_frame_ = msg->header.frame_id; map_cloud_ = pcl::PointCloud::Ptr(new pcl::PointCloud); @@ -87,11 +115,11 @@ bool MapHeightFitter::Impl::get_partial_point_cloud_map(const Point & point) { const auto logger = node_->get_logger(); - if (!cli_map_) { + if (!cli_pcd_map_) { RCLCPP_WARN_STREAM(logger, "Partial map loading in pointcloud_map_loader is not enabled"); return false; } - if (!cli_map_->service_is_ready()) { + if (!cli_pcd_map_->service_is_ready()) { RCLCPP_WARN_STREAM(logger, "Partial map loading in pointcloud_map_loader is not ready"); return false; } @@ -102,7 +130,7 @@ bool MapHeightFitter::Impl::get_partial_point_cloud_map(const Point & point) req->area.radius = 50; RCLCPP_DEBUG(logger, "Send request to map_loader"); - auto future = cli_map_->async_send_request(req); + auto future = cli_pcd_map_->async_send_request(req); auto status = future.wait_for(std::chrono::seconds(1)); while (status != std::future_status::ready) { RCLCPP_DEBUG(logger, "waiting response"); @@ -134,72 +162,121 @@ bool MapHeightFitter::Impl::get_partial_point_cloud_map(const Point & point) return true; } -double MapHeightFitter::Impl::get_ground_height(const 
tf2::Vector3 & point) const +void MapHeightFitter::Impl::on_vector_map( + const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr msg) { - const double x = point.getX(); - const double y = point.getY(); - - // find distance d to closest point - double min_dist2 = INFINITY; - for (const auto & p : map_cloud_->points) { - const double dx = x - p.x; - const double dy = y - p.y; - const double sd = (dx * dx) + (dy * dy); - min_dist2 = std::min(min_dist2, sd); - } + vector_map_ = std::make_shared(); + lanelet::utils::conversion::fromBinMsg(*msg, vector_map_); + map_frame_ = msg->header.frame_id; +} + +double MapHeightFitter::Impl::get_ground_height(const Point & point) const +{ + const auto logger = node_->get_logger(); + + const double x = point.x; + const double y = point.y; - // find lowest height within radius (d+1.0) - const double radius2 = std::pow(std::sqrt(min_dist2) + 1.0, 2.0); double height = INFINITY; - for (const auto & p : map_cloud_->points) { - const double dx = x - p.x; - const double dy = y - p.y; - const double sd = (dx * dx) + (dy * dy); - if (sd < radius2) { - height = std::min(height, static_cast(p.z)); + if (fit_target_ == "pointcloud_map") { + // find distance d to closest point + double min_dist2 = INFINITY; + for (const auto & p : map_cloud_->points) { + const double dx = x - p.x; + const double dy = y - p.y; + const double sd = (dx * dx) + (dy * dy); + min_dist2 = std::min(min_dist2, sd); + } + + // find lowest height within radius (d+1.0) + const double radius2 = std::pow(std::sqrt(min_dist2) + 1.0, 2.0); + + for (const auto & p : map_cloud_->points) { + const double dx = x - p.x; + const double dy = y - p.y; + const double sd = (dx * dx) + (dy * dy); + if (sd < radius2) { + height = std::min(height, static_cast(p.z)); + } + } + } else if (fit_target_ == "vector_map") { + lanelet::ConstLanelets all_lanelets = lanelet::utils::query::laneletLayer(vector_map_); + + geometry_msgs::msg::Pose pose; + pose.position.x = x; + pose.position.y 
= y; + pose.position.z = 0.0; + lanelet::ConstLanelet closest_lanelet; + const bool result = + lanelet::utils::query::getClosestLanelet(all_lanelets, pose, &closest_lanelet); + if (!result) { + RCLCPP_WARN_STREAM(logger, "failed to get closest lanelet"); + return point.z; } + height = closest_lanelet.centerline().back().z(); } - return std::isfinite(height) ? height : point.getZ(); + return std::isfinite(height) ? height : point.z; } std::optional MapHeightFitter::Impl::fit(const Point & position, const std::string & frame) { const auto logger = node_->get_logger(); - tf2::Vector3 point(position.x, position.y, position.z); + RCLCPP_INFO_STREAM(logger, "fit_target: " << fit_target_ << ", frame: " << frame); - RCLCPP_DEBUG(logger, "original point: %.3f %.3f %.3f", point.getX(), point.getY(), point.getZ()); + Point point; + point.x = position.x; + point.y = position.y; + point.z = position.z; - if (cli_map_) { - if (!get_partial_point_cloud_map(position)) { + RCLCPP_DEBUG(logger, "original point: %.3f %.3f %.3f", point.x, point.y, point.z); + + // prepare data + if (fit_target_ == "pointcloud_map") { + if (cli_pcd_map_) { // if cli_pcd_map_ is available, prepare pointcloud map by partial loading + if (!get_partial_point_cloud_map(position)) { + RCLCPP_WARN_STREAM(logger, "failed to get partial point cloud map"); + return std::nullopt; + } + } // otherwise, pointcloud map should be already prepared by on_pcd_map + if (!map_cloud_) { + RCLCPP_WARN_STREAM(logger, "point cloud map is not ready"); + return std::nullopt; + } + } else if (fit_target_ == "vector_map") { + // vector_map_ should be already prepared by on_vector_map + if (!vector_map_) { + RCLCPP_WARN_STREAM(logger, "vector map is not ready"); return std::nullopt; } + } else { + throw std::runtime_error("invalid fit_target"); } - if (!map_cloud_) { - RCLCPP_WARN_STREAM(logger, "point cloud map is not ready"); + // transform frame to map_frame_ + try { + const auto stamped = tf2_buffer_.lookupTransform(frame, 
map_frame_, tf2::TimePointZero); + tf2::doTransform(point, point, stamped); + } catch (tf2::TransformException & exception) { + RCLCPP_WARN_STREAM(logger, "failed to lookup transform: " << exception.what()); return std::nullopt; } + // fit height on map_frame_ + point.z = get_ground_height(point); + + // transform map_frame_ to frame try { const auto stamped = tf2_buffer_.lookupTransform(map_frame_, frame, tf2::TimePointZero); - tf2::Transform transform{tf2::Quaternion{}, tf2::Vector3{}}; - tf2::fromMsg(stamped.transform, transform); - point = transform * point; - point.setZ(get_ground_height(point)); - point = transform.inverse() * point; + tf2::doTransform(point, point, stamped); } catch (tf2::TransformException & exception) { RCLCPP_WARN_STREAM(logger, "failed to lookup transform: " << exception.what()); return std::nullopt; } - RCLCPP_DEBUG(logger, "modified point: %.3f %.3f %.3f", point.getX(), point.getY(), point.getZ()); + RCLCPP_DEBUG(logger, "modified point: %.3f %.3f %.3f", point.x, point.y, point.z); - Point result; - result.x = point.getX(); - result.y = point.getY(); - result.z = point.getZ(); - return result; + return point; } MapHeightFitter::MapHeightFitter(rclcpp::Node * node) diff --git a/map/map_loader/README.md b/map/map_loader/README.md index fbe019096a3e7..8a31ecee50be5 100644 --- a/map/map_loader/README.md +++ b/map/map_loader/README.md @@ -111,15 +111,7 @@ Please see [the description of `GetSelectedPointCloudMap.srv`](https://github.co ### Parameters -| Name | Type | Description | Default value | -| :---------------------------- | :---------- | :-------------------------------------------------------------------------------- | :------------ | -| enable_whole_load | bool | A flag to enable raw pointcloud map publishing | true | -| enable_downsampled_whole_load | bool | A flag to enable downsampled pointcloud map publishing | false | -| enable_partial_load | bool | A flag to enable partial pointcloud map server | false | -| 
enable_selected_load | bool | A flag to enable selected pointcloud map server | false | -| leaf_size | float | Downsampling leaf size (only used when enable_downsampled_whole_load is set true) | 3.0 | -| pcd_paths_or_directory | std::string | Path(s) to pointcloud map file or directory | | -| pcd_metadata_path | std::string | Path to pointcloud metadata file | | +{{ json_to_markdown("map/map_loader/schema/pointcloud_map_loader.schema.json") }} ### Interfaces @@ -156,10 +148,7 @@ Please see [tier4_autoware_msgs/msg/MapProjectorInfo.msg](https://github.com/tie ### Parameters -| Name | Type | Description | Default value | -| :--------------------- | :---------- | :----------------------------------------------- | :------------ | -| center_line_resolution | double | Define the resolution of the lanelet center line | 5.0 | -| lanelet2_map_path | std::string | The lanelet2 map path | None | +{{ json_to_markdown("map/map_loader/schema/lanelet2_map_loader.schema.json") }} --- diff --git a/map/map_loader/config/lanelet2_map_loader.param.yaml b/map/map_loader/config/lanelet2_map_loader.param.yaml index 24d2b0e8ed7a8..b830e038f1de2 100755 --- a/map/map_loader/config/lanelet2_map_loader.param.yaml +++ b/map/map_loader/config/lanelet2_map_loader.param.yaml @@ -1,3 +1,4 @@ /**: ros__parameters: center_line_resolution: 5.0 # [m] + lanelet2_map_path: $(var lanelet2_map_path) # The lanelet2 map path diff --git a/map/map_loader/config/pointcloud_map_loader.param.yaml b/map/map_loader/config/pointcloud_map_loader.param.yaml index b4efbec9706b4..180a3e6f9a2e5 100644 --- a/map/map_loader/config/pointcloud_map_loader.param.yaml +++ b/map/map_loader/config/pointcloud_map_loader.param.yaml @@ -7,3 +7,5 @@ # only used when downsample_whole_load enabled leaf_size: 3.0 # downsample leaf size [m] + pcd_paths_or_directory: [$(var pcd_paths_or_directory)] # Path to the pointcloud map file or directory + pcd_metadata_path: $(var pcd_metadata_path) # Path to pointcloud metadata file diff --git 
a/map/map_loader/launch/lanelet2_map_loader.launch.xml b/map/map_loader/launch/lanelet2_map_loader.launch.xml index b24ddae3a53e5..ea0157620ce43 100644 --- a/map/map_loader/launch/lanelet2_map_loader.launch.xml +++ b/map/map_loader/launch/lanelet2_map_loader.launch.xml @@ -5,13 +5,12 @@ - + - - + diff --git a/map/map_loader/launch/pointcloud_map_loader.launch.xml b/map/map_loader/launch/pointcloud_map_loader.launch.xml index 9901d04df5645..3e447456bb623 100644 --- a/map/map_loader/launch/pointcloud_map_loader.launch.xml +++ b/map/map_loader/launch/pointcloud_map_loader.launch.xml @@ -7,8 +7,6 @@ - - - + diff --git a/map/map_loader/schema/lanelet2_map_loader.schema.json b/map/map_loader/schema/lanelet2_map_loader.schema.json new file mode 100644 index 0000000000000..fa2b4d363ff92 --- /dev/null +++ b/map/map_loader/schema/lanelet2_map_loader.schema.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for lanelet2 map loader Node", + "type": "object", + "definitions": { + "lanelet2_map_loader": { + "type": "object", + "properties": { + "center_line_resolution": { + "type": "number", + "description": "Resolution of the Lanelet center line [m]", + "default": "5.0" + }, + "lanelet2_map_path": { + "type": "string", + "description": "The lanelet2 map path pointing to the .osm file", + "default": "" + } + }, + "required": ["center_line_resolution", "lanelet2_map_path"], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/lanelet2_map_loader" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/map/map_loader/schema/pointcloud_map_loader.schema.json b/map/map_loader/schema/pointcloud_map_loader.schema.json new file mode 100644 index 0000000000000..03cf10b9c5423 --- /dev/null +++ 
b/map/map_loader/schema/pointcloud_map_loader.schema.json @@ -0,0 +1,71 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for point cloud map loader Node", + "type": "object", + "definitions": { + "pointcloud_map_loader": { + "type": "object", + "properties": { + "enable_whole_load": { + "type": "boolean", + "description": "Enable raw pointcloud map publishing", + "default": true + }, + "enable_downsampled_whole_load": { + "type": "boolean", + "description": "Enable downsampled pointcloud map publishing", + "default": false + }, + "enable_partial_load": { + "type": "boolean", + "description": "Enable partial pointcloud map server", + "default": true + }, + "enable_selected_load": { + "type": "boolean", + "description": "Enable selected pointcloud map server", + "default": false + }, + "leaf_size": { + "type": "number", + "description": "Downsampling leaf size (only used when enable_downsampled_whole_load is set true)", + "default": 3.0 + }, + "pcd_paths_or_directory": { + "type": "array", + "description": "Path(s) to pointcloud map file or directory", + "default": [] + }, + "pcd_metadata_path": { + "type": "string", + "description": "Path to pointcloud metadata file", + "default": "" + } + }, + "required": [ + "enable_whole_load", + "enable_downsampled_whole_load", + "enable_partial_load", + "enable_selected_load", + "leaf_size", + "pcd_paths_or_directory", + "pcd_metadata_path" + ], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/pointcloud_map_loader" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/map/map_loader/src/lanelet2_map_loader/lanelet2_map_loader_node.cpp b/map/map_loader/src/lanelet2_map_loader/lanelet2_map_loader_node.cpp index 03504cc8e2e20..617f3dd503ce0 100644 --- 
a/map/map_loader/src/lanelet2_map_loader/lanelet2_map_loader_node.cpp +++ b/map/map_loader/src/lanelet2_map_loader/lanelet2_map_loader_node.cpp @@ -61,8 +61,8 @@ Lanelet2MapLoaderNode::Lanelet2MapLoaderNode(const rclcpp::NodeOptions & options sub_map_projector_info_, [this](const MapProjectorInfo::Message::ConstSharedPtr msg) { on_map_projector_info(msg); }); - declare_parameter("lanelet2_map_path", ""); - declare_parameter("center_line_resolution", 5.0); + declare_parameter("lanelet2_map_path"); + declare_parameter("center_line_resolution"); } void Lanelet2MapLoaderNode::on_map_projector_info( diff --git a/map/map_loader/src/lanelet2_map_loader/lanelet2_map_visualization_node.cpp b/map/map_loader/src/lanelet2_map_loader/lanelet2_map_visualization_node.cpp index a0c57759d51a6..bdfbcf904cf36 100644 --- a/map/map_loader/src/lanelet2_map_loader/lanelet2_map_visualization_node.cpp +++ b/map/map_loader/src/lanelet2_map_loader/lanelet2_map_visualization_node.cpp @@ -71,7 +71,7 @@ Lanelet2MapVisualizationNode::Lanelet2MapVisualizationNode(const rclcpp::NodeOpt { using std::placeholders::_1; - viz_lanelets_centerline_ = this->declare_parameter("viz_lanelets_centerline", true); + viz_lanelets_centerline_ = true; sub_map_bin_ = this->create_subscription( "input/lanelet2_map", rclcpp::QoS{1}.transient_local(), @@ -96,8 +96,10 @@ void Lanelet2MapVisualizationNode::onMapBin( lanelet::ConstLanelets crosswalk_lanelets = lanelet::utils::query::crosswalkLanelets(all_lanelets); lanelet::ConstLineStrings3d partitions = lanelet::utils::query::getAllPartitions(viz_lanelet_map); - lanelet::ConstLineStrings3d pedestrian_markings = - lanelet::utils::query::getAllPedestrianMarkings(viz_lanelet_map); + lanelet::ConstLineStrings3d pedestrian_polygon_markings = + lanelet::utils::query::getAllPedestrianPolygonMarkings(viz_lanelet_map); + lanelet::ConstLineStrings3d pedestrian_line_markings = + lanelet::utils::query::getAllPedestrianLineMarkings(viz_lanelet_map); lanelet::ConstLanelets 
walkway_lanelets = lanelet::utils::query::walkwayLanelets(all_lanelets); std::vector stop_lines = lanelet::utils::query::stopLinesLanelets(road_lanelets); @@ -179,8 +181,12 @@ void Lanelet2MapVisualizationNode::onMapBin( &map_marker_array, lanelet::visualization::laneletsAsTriangleMarkerArray( "crosswalk_lanelets", crosswalk_lanelets, cl_cross)); insertMarkerArray( - &map_marker_array, lanelet::visualization::pedestrianMarkingsAsMarkerArray( - pedestrian_markings, cl_pedestrian_markings)); + &map_marker_array, lanelet::visualization::pedestrianPolygonMarkingsAsMarkerArray( + pedestrian_polygon_markings, cl_pedestrian_markings)); + + insertMarkerArray( + &map_marker_array, lanelet::visualization::pedestrianLineMarkingsAsMarkerArray( + pedestrian_line_markings, cl_pedestrian_markings)); insertMarkerArray( &map_marker_array, lanelet::visualization::laneletsAsTriangleMarkerArray( "walkway_lanelets", walkway_lanelets, cl_cross)); diff --git a/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_module.cpp b/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_module.cpp index b4aa930831a04..a66f9ee99534c 100644 --- a/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_module.cpp +++ b/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_module.cpp @@ -74,7 +74,7 @@ sensor_msgs::msg::PointCloud2 PointcloudMapLoaderModule::loadPCDFiles( for (size_t i = 0; i < pcd_paths.size(); ++i) { auto & path = pcd_paths[i]; if (i % 50 == 0) { - RCLCPP_INFO_STREAM( + RCLCPP_DEBUG_STREAM( logger_, fmt::format("Load {} ({} out of {})", path, i + 1, pcd_paths.size())); } diff --git a/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_node.cpp b/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_node.cpp index 5f4b3e311e6e9..bacbabe60a719 100644 --- a/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_node.cpp +++ b/map/map_loader/src/pointcloud_map_loader/pointcloud_map_loader_node.cpp @@ -88,30 +88,27 @@ 
PointCloudMapLoaderNode::PointCloudMapLoaderNode(const rclcpp::NodeOptions & opt std::map PointCloudMapLoaderNode::getPCDMetadata( const std::string & pcd_metadata_path, const std::vector & pcd_paths) const { - std::map pcd_metadata_dict; - if (pcd_paths.size() != 1) { - if (!fs::exists(pcd_metadata_path)) { - throw std::runtime_error("PCD metadata file not found: " + pcd_metadata_path); - } - - pcd_metadata_dict = loadPCDMetadata(pcd_metadata_path); + if (fs::exists(pcd_metadata_path)) { + auto pcd_metadata_dict = loadPCDMetadata(pcd_metadata_path); pcd_metadata_dict = replaceWithAbsolutePath(pcd_metadata_dict, pcd_paths); - RCLCPP_INFO_STREAM(get_logger(), "Loaded PCD metadata: " << pcd_metadata_path); - } else { + return pcd_metadata_dict; + } else if (pcd_paths.size() == 1) { // An exception when using a single file PCD map so that the users do not have to provide // a metadata file. // Note that this should ideally be avoided and thus eventually be removed by someone, until // Autoware users get used to handling the PCD file(s) with metadata. 
- RCLCPP_INFO_STREAM(get_logger(), "Create PCD metadata, as the pointcloud is a single file."); + RCLCPP_DEBUG_STREAM(get_logger(), "Create PCD metadata, as the pointcloud is a single file."); pcl::PointCloud single_pcd; - if (pcl::io::loadPCDFile(pcd_paths[0], single_pcd) == -1) { - throw std::runtime_error("PCD load failed: " + pcd_paths[0]); + const auto & pcd_path = pcd_paths.front(); + if (pcl::io::loadPCDFile(pcd_path, single_pcd) == -1) { + throw std::runtime_error("PCD load failed: " + pcd_path); } PCDFileMetadata metadata = {}; pcl::getMinMax3D(single_pcd, metadata.min, metadata.max); - pcd_metadata_dict[pcd_paths[0]] = metadata; + return std::map{{pcd_path, metadata}}; + } else { + throw std::runtime_error("PCD metadata file not found: " + pcd_metadata_path); } - return pcd_metadata_dict; } std::vector PointCloudMapLoaderNode::getPcdPaths( diff --git a/map/map_loader/test/lanelet2_map_loader_launch.test.py b/map/map_loader/test/lanelet2_map_loader_launch.test.py index 24758a46f75aa..f1aa9e0efe922 100644 --- a/map/map_loader/test/lanelet2_map_loader_launch.test.py +++ b/map/map_loader/test/lanelet2_map_loader_launch.test.py @@ -34,7 +34,7 @@ def generate_test_description(): lanelet2_map_loader = Node( package="map_loader", executable="lanelet2_map_loader", - parameters=[{"lanelet2_map_path": lanelet2_map_path}], + parameters=[{"lanelet2_map_path": lanelet2_map_path, "center_line_resolution": 5.0}], ) context = {} diff --git a/map/util/lanelet2_map_preprocessor/CMakeLists.txt b/map/util/lanelet2_map_preprocessor/CMakeLists.txt index 8a6b16c05011b..982a262455efa 100644 --- a/map/util/lanelet2_map_preprocessor/CMakeLists.txt +++ b/map/util/lanelet2_map_preprocessor/CMakeLists.txt @@ -29,5 +29,6 @@ if(BUILD_TESTING) endif() ament_auto_package(INSTALL_TO_SHARE + config launch ) diff --git a/map/util/lanelet2_map_preprocessor/config/fix_z_value_by_pcd.param.yaml b/map/util/lanelet2_map_preprocessor/config/fix_z_value_by_pcd.param.yaml new file mode 100644 index 
0000000000000..de86ef7936d74 --- /dev/null +++ b/map/util/lanelet2_map_preprocessor/config/fix_z_value_by_pcd.param.yaml @@ -0,0 +1,5 @@ +/**: + ros__parameters: + llt_map_path: $(var llt_map_path) + pcd_map_path: $(var pcd_map_path) + llt_output_path: $(var llt_output_path) diff --git a/map/util/lanelet2_map_preprocessor/config/transform_maps.param.yaml b/map/util/lanelet2_map_preprocessor/config/transform_maps.param.yaml new file mode 100644 index 0000000000000..548c28055c834 --- /dev/null +++ b/map/util/lanelet2_map_preprocessor/config/transform_maps.param.yaml @@ -0,0 +1,12 @@ +/**: + ros__parameters: + llt_map_path: $(var llt_map_path) + pcd_map_path: $(var pcd_map_path) + llt_output_path: $(var llt_output_path) + pcd_output_path: $(var pcd_output_path) + x: 0.0 + y: 0.0 + z: 0.0 + roll: 0.0 + pitch: 0.0 + yaw: 0.0 diff --git a/map/util/lanelet2_map_preprocessor/launch/fix_z_value_by_pcd.launch.xml b/map/util/lanelet2_map_preprocessor/launch/fix_z_value_by_pcd.launch.xml index 82021ff25d240..5b2aa2e425184 100644 --- a/map/util/lanelet2_map_preprocessor/launch/fix_z_value_by_pcd.launch.xml +++ b/map/util/lanelet2_map_preprocessor/launch/fix_z_value_by_pcd.launch.xml @@ -1,12 +1,6 @@ - - - - - - - + diff --git a/map/util/lanelet2_map_preprocessor/launch/transform_maps.launch.xml b/map/util/lanelet2_map_preprocessor/launch/transform_maps.launch.xml index 902ca982dace1..950593771878c 100644 --- a/map/util/lanelet2_map_preprocessor/launch/transform_maps.launch.xml +++ b/map/util/lanelet2_map_preprocessor/launch/transform_maps.launch.xml @@ -1,26 +1,6 @@ - - - - - - - - - - - - - - - - - - - - - + diff --git a/map/util/lanelet2_map_preprocessor/package.xml b/map/util/lanelet2_map_preprocessor/package.xml index 79fce94f87a95..421f04257de67 100644 --- a/map/util/lanelet2_map_preprocessor/package.xml +++ b/map/util/lanelet2_map_preprocessor/package.xml @@ -5,7 +5,15 @@ 0.1.0 The lanelet2_map_preprocessor package Ryohsuke Mitsudome + Yamato Ando + Kento Yabuuchi + 
Masahiro Sakamoto + NGUYEN Viet Anh + Taiki Yamada + Shintaro Sakoda + Ryu Yamamoto Apache License 2.0 + Ryohsuke Mitsudome ament_cmake_auto autoware_cmake diff --git a/map/util/lanelet2_map_preprocessor/schema/transform_maps.schema.json b/map/util/lanelet2_map_preprocessor/schema/transform_maps.schema.json new file mode 100644 index 0000000000000..78fb3952d44ce --- /dev/null +++ b/map/util/lanelet2_map_preprocessor/schema/transform_maps.schema.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for Transforming Maps", + "type": "object", + "definitions": { + "transform_maps": { + "type": "object", + "properties": { + "llt_map_path": { + "type": "string", + "description": "Path pointing to the input lanelet2 file", + "default": "" + }, + "pcd_map_path": { + "type": "string", + "description": "Path pointing to the input point cloud file", + "default": "" + }, + "llt_output_path": { + "type": "string", + "description": "Path pointing to the output lanelet2 file", + "default": "" + }, + "pcd_output_path": { + "type": "string", + "description": "Path pointing to the output point cloud file", + "default": "" + }, + "x": { + "type": "number", + "default": 0.0, + "description": "x factor of Translation vector for transforming maps [m]" + }, + "y": { + "type": "number", + "default": 0.0, + "description": "y factor of Translation vector for transforming maps [m]" + }, + "z": { + "type": "number", + "default": 0.0, + "description": "z factor of Translation vector for transforming maps [m]" + }, + "roll": { + "type": "number", + "default": 0.0, + "description": "roll factor of Rotation vector for transforming maps [rad]" + }, + "pitch": { + "type": "number", + "default": 0.0, + "description": "pitch factor of Rotation vector for transforming maps [rad]" + }, + "yaw": { + "type": "number", + "default": 0.0, + "description": "yaw factor of Rotation vector for transforming maps [rad]" + } + }, + "required": ["x", "y", "z", 
"roll", "pitch", "yaw"] + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/transform_maps" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/map/util/lanelet2_map_preprocessor/src/transform_maps.cpp b/map/util/lanelet2_map_preprocessor/src/transform_maps.cpp index 987f559be1e8e..2317ddb7d9f95 100644 --- a/map/util/lanelet2_map_preprocessor/src/transform_maps.cpp +++ b/map/util/lanelet2_map_preprocessor/src/transform_maps.cpp @@ -107,12 +107,12 @@ int main(int argc, char * argv[]) const auto pcd_map_path = node->declare_parameter("pcd_map_path"); const auto llt_output_path = node->declare_parameter("llt_output_path"); const auto pcd_output_path = node->declare_parameter("pcd_output_path"); - const auto x = node->declare_parameter("x", 0.0); - const auto y = node->declare_parameter("y", 0.0); - const auto z = node->declare_parameter("z", 0.0); - const auto roll = node->declare_parameter("roll", 0.0); - const auto pitch = node->declare_parameter("pitch", 0.0); - const auto yaw = node->declare_parameter("yaw", 0.0); + const auto x = node->declare_parameter("x"); + const auto y = node->declare_parameter("y"); + const auto z = node->declare_parameter("z"); + const auto roll = node->declare_parameter("roll"); + const auto pitch = node->declare_parameter("pitch"); + const auto yaw = node->declare_parameter("yaw"); std::cout << "transforming maps with following parameters" << std::endl << "x " << x << std::endl diff --git a/perception/bytetrack/README.md b/perception/bytetrack/README.md index cabbdd2f05836..90116c4dae7e5 100644 --- a/perception/bytetrack/README.md +++ b/perception/bytetrack/README.md @@ -25,7 +25,7 @@ the number of false negatives is expected to decrease by using it. The paper just says that the 2d tracking algorithm is a simple Kalman filter. Original codes use the `top-left-corner` and `aspect ratio` and `size` as the state vector. 
-This is sometimes unstable because the aspectratio can be changed by the occlusion. +This is sometimes unstable because the aspect ratio can be changed by the occlusion. So, we use the `top-left` and `size` as the state vector. Kalman filter settings can be controlled by the parameters in `config/bytetrack_node.param.yaml`. diff --git a/perception/bytetrack/config/bytetrack.param.yaml b/perception/bytetrack/config/bytetrack.param.yaml new file mode 100644 index 0000000000000..e38dbdb4d3efa --- /dev/null +++ b/perception/bytetrack/config/bytetrack.param.yaml @@ -0,0 +1,3 @@ +/**: + ros__parameters: + track_buffer_length: 30 diff --git a/perception/bytetrack/config/bytetrack_visualizer.param.yaml b/perception/bytetrack/config/bytetrack_visualizer.param.yaml new file mode 100644 index 0000000000000..ad1f6fce301b3 --- /dev/null +++ b/perception/bytetrack/config/bytetrack_visualizer.param.yaml @@ -0,0 +1,3 @@ +/**: + ros__parameters: + use_raw: false diff --git a/perception/bytetrack/config/bytetracker.param.yaml b/perception/bytetrack/config/kalman_filter.param.yaml similarity index 100% rename from perception/bytetrack/config/bytetracker.param.yaml rename to perception/bytetrack/config/kalman_filter.param.yaml diff --git a/perception/bytetrack/launch/bytetrack.launch.xml b/perception/bytetrack/launch/bytetrack.launch.xml index 25ed56b991c30..3a5a369637453 100644 --- a/perception/bytetrack/launch/bytetrack.launch.xml +++ b/perception/bytetrack/launch/bytetrack.launch.xml @@ -4,21 +4,21 @@ - - + + - + - + diff --git a/perception/bytetrack/lib/include/data_type.h b/perception/bytetrack/lib/include/data_type.h index 896e021538cae..64e8acc6c1e50 100644 --- a/perception/bytetrack/lib/include/data_type.h +++ b/perception/bytetrack/lib/include/data_type.h @@ -50,7 +50,7 @@ typedef Eigen::Matrix FEATURE; typedef Eigen::Matrix FEATURESS; // typedef std::vector FEATURESS; -// Kalmanfilter +// Kalman Filter // typedef Eigen::Matrix KAL_FILTER; typedef Eigen::Matrix KAL_MEAN; 
typedef Eigen::Matrix KAL_COVA; diff --git a/perception/bytetrack/lib/src/strack.cpp b/perception/bytetrack/lib/src/strack.cpp index eda11509d2403..f2c5a40ad3c2e 100644 --- a/perception/bytetrack/lib/src/strack.cpp +++ b/perception/bytetrack/lib/src/strack.cpp @@ -72,7 +72,7 @@ STrack::STrack(std::vector input_tlwh, float score, int label) const std::string package_share_directory = ament_index_cpp::get_package_share_directory("bytetrack"); const std::string default_config_path = - package_share_directory + "/config/bytetracker.param.yaml"; + package_share_directory + "/config/kalman_filter.param.yaml"; if (!_parameters_loaded) { load_parameters(default_config_path); _parameters_loaded = true; diff --git a/perception/compare_map_segmentation/src/voxel_grid_map_loader.cpp b/perception/compare_map_segmentation/src/voxel_grid_map_loader.cpp index 06bd89af38259..ef3727019c0f4 100644 --- a/perception/compare_map_segmentation/src/voxel_grid_map_loader.cpp +++ b/perception/compare_map_segmentation/src/voxel_grid_map_loader.cpp @@ -241,8 +241,10 @@ bool VoxelGridMapLoader::is_in_voxel( const double dist_x = map->points.at(voxel_index).x - target_point.x; const double dist_y = map->points.at(voxel_index).y - target_point.y; const double dist_z = map->points.at(voxel_index).z - target_point.z; - const double sqr_distance = dist_x * dist_x + dist_y * dist_y + dist_z * dist_z; - if (sqr_distance < distance_threshold * distance_threshold * downsize_ratio_z_axis_) { + // check if the point is inside the distance threshold voxel + if ( + std::abs(dist_x) < distance_threshold && std::abs(dist_y) < distance_threshold && + std::abs(dist_z) < distance_threshold * downsize_ratio_z_axis_) { return true; } } diff --git a/perception/crosswalk_traffic_light_estimator/README.md b/perception/crosswalk_traffic_light_estimator/README.md index 73c7151997c54..412a93ae6dd7a 100644 --- a/perception/crosswalk_traffic_light_estimator/README.md +++ 
b/perception/crosswalk_traffic_light_estimator/README.md @@ -8,17 +8,17 @@ ### Input -| Name | Type | Description | -| ------------------------------------ | ------------------------------------------------ | ------------------ | -| `~/input/vector_map` | `autoware_auto_mapping_msgs::msg::HADMapBin` | vector map | -| `~/input/route` | `autoware_planning_msgs::msg::LaneletRoute` | route | -| `~/input/classified/traffic_signals` | `tier4_perception_msgs::msg::TrafficSignalArray` | classified signals | +| Name | Type | Description | +| ------------------------------------ | --------------------------------------------------- | ------------------ | +| `~/input/vector_map` | `autoware_auto_mapping_msgs::msg::HADMapBin` | vector map | +| `~/input/route` | `autoware_planning_msgs::msg::LaneletRoute` | route | +| `~/input/classified/traffic_signals` | `autoware_perception_msgs::msg::TrafficSignalArray` | classified signals | ### Output -| Name | Type | Description | -| -------------------------- | ------------------------------------------------ | --------------------------------------------------------- | -| `~/output/traffic_signals` | `tier4_perception_msgs::msg::TrafficSignalArray` | output that contains estimated pedestrian traffic signals | +| Name | Type | Description | +| -------------------------- | --------------------------------------------------- | --------------------------------------------------------- | +| `~/output/traffic_signals` | `autoware_perception_msgs::msg::TrafficSignalArray` | output that contains estimated pedestrian traffic signals | ## Parameters diff --git a/perception/crosswalk_traffic_light_estimator/include/crosswalk_traffic_light_estimator/node.hpp b/perception/crosswalk_traffic_light_estimator/include/crosswalk_traffic_light_estimator/node.hpp index 0578d671d27b9..ccffde4fb3a11 100644 --- a/perception/crosswalk_traffic_light_estimator/include/crosswalk_traffic_light_estimator/node.hpp +++ 
b/perception/crosswalk_traffic_light_estimator/include/crosswalk_traffic_light_estimator/node.hpp @@ -23,9 +23,6 @@ #include #include #include -#include -#include -#include #include #include diff --git a/perception/crosswalk_traffic_light_estimator/src/node.cpp b/perception/crosswalk_traffic_light_estimator/src/node.cpp index 904a365786755..e29a80de283ce 100644 --- a/perception/crosswalk_traffic_light_estimator/src/node.cpp +++ b/perception/crosswalk_traffic_light_estimator/src/node.cpp @@ -102,7 +102,7 @@ CrosswalkTrafficLightEstimatorNode::CrosswalkTrafficLightEstimatorNode( void CrosswalkTrafficLightEstimatorNode::onMap(const HADMapBin::ConstSharedPtr msg) { - RCLCPP_INFO(get_logger(), "[CrosswalkTrafficLightEstimatorNode]: Start loading lanelet"); + RCLCPP_DEBUG(get_logger(), "[CrosswalkTrafficLightEstimatorNode]: Start loading lanelet"); lanelet_map_ptr_ = std::make_shared(); lanelet::utils::conversion::fromBinMsg( *msg, lanelet_map_ptr_, &traffic_rules_ptr_, &routing_graph_ptr_); @@ -117,7 +117,7 @@ void CrosswalkTrafficLightEstimatorNode::onMap(const HADMapBin::ConstSharedPtr m lanelet::routing::RoutingGraphContainer overall_graphs({vehicle_graph, pedestrian_graph}); overall_graphs_ptr_ = std::make_shared(overall_graphs); - RCLCPP_INFO(get_logger(), "[CrosswalkTrafficLightEstimatorNode]: Map is loaded"); + RCLCPP_DEBUG(get_logger(), "[CrosswalkTrafficLightEstimatorNode]: Map is loaded"); } void CrosswalkTrafficLightEstimatorNode::onRoute(const LaneletRoute::ConstSharedPtr msg) diff --git a/perception/detected_object_validation/include/detected_object_validation/detected_object_filter/object_lanelet_filter.hpp b/perception/detected_object_validation/include/detected_object_validation/detected_object_filter/object_lanelet_filter.hpp index 4331b5c19d3f1..e3c3263466033 100644 --- a/perception/detected_object_validation/include/detected_object_validation/detected_object_filter/object_lanelet_filter.hpp +++ 
b/perception/detected_object_validation/include/detected_object_validation/detected_object_filter/object_lanelet_filter.hpp @@ -19,6 +19,7 @@ #include #include +#include #include #include @@ -27,6 +28,7 @@ #include #include +#include #include namespace object_lanelet_filter @@ -48,6 +50,7 @@ class ObjectLaneletFilterNode : public rclcpp::Node rclcpp::Publisher::SharedPtr object_pub_; rclcpp::Subscription::SharedPtr map_sub_; rclcpp::Subscription::SharedPtr object_sub_; + std::unique_ptr debug_publisher_{nullptr}; lanelet::LaneletMapPtr lanelet_map_ptr_; lanelet::ConstLanelets road_lanelets_; diff --git a/perception/detected_object_validation/include/detected_object_validation/obstacle_pointcloud_based_validator/obstacle_pointcloud_based_validator.hpp b/perception/detected_object_validation/include/detected_object_validation/obstacle_pointcloud_based_validator/obstacle_pointcloud_based_validator.hpp index 2d70247445043..eb6da6bc45b24 100644 --- a/perception/detected_object_validation/include/detected_object_validation/obstacle_pointcloud_based_validator/obstacle_pointcloud_based_validator.hpp +++ b/perception/detected_object_validation/include/detected_object_validation/obstacle_pointcloud_based_validator/obstacle_pointcloud_based_validator.hpp @@ -20,6 +20,7 @@ #include "detected_object_validation/obstacle_pointcloud_based_validator/debugger.hpp" #include +#include #include #include @@ -133,6 +134,7 @@ class ObstaclePointCloudBasedValidator : public rclcpp::Node rclcpp::Publisher::SharedPtr objects_pub_; message_filters::Subscriber objects_sub_; message_filters::Subscriber obstacle_pointcloud_sub_; + std::unique_ptr debug_publisher_{nullptr}; tf2_ros::Buffer tf_buffer_; tf2_ros::TransformListener tf_listener_; diff --git a/perception/detected_object_validation/src/object_lanelet_filter.cpp b/perception/detected_object_validation/src/object_lanelet_filter.cpp index cd16d414425cb..74000a91e60fc 100644 --- 
a/perception/detected_object_validation/src/object_lanelet_filter.cpp +++ b/perception/detected_object_validation/src/object_lanelet_filter.cpp @@ -52,6 +52,9 @@ ObjectLaneletFilterNode::ObjectLaneletFilterNode(const rclcpp::NodeOptions & nod "input/object", rclcpp::QoS{1}, std::bind(&ObjectLaneletFilterNode::objectCallback, this, _1)); object_pub_ = this->create_publisher( "output/object", rclcpp::QoS{1}); + + debug_publisher_ = + std::make_unique(this, "object_lanelet_filter"); } void ObjectLaneletFilterNode::mapCallback( @@ -116,6 +119,15 @@ void ObjectLaneletFilterNode::objectCallback( ++index; } object_pub_->publish(output_object_msg); + + // Publish debug info + const double pipeline_latency = + std::chrono::duration( + std::chrono::nanoseconds( + (this->get_clock()->now() - output_object_msg.header.stamp).nanoseconds())) + .count(); + debug_publisher_->publish( + "debug/pipeline_latency_ms", pipeline_latency); } geometry_msgs::msg::Polygon ObjectLaneletFilterNode::setFootprint( diff --git a/perception/detected_object_validation/src/obstacle_pointcloud_based_validator.cpp b/perception/detected_object_validation/src/obstacle_pointcloud_based_validator.cpp index d903a9ca3bb41..f3f56652792e9 100644 --- a/perception/detected_object_validation/src/obstacle_pointcloud_based_validator.cpp +++ b/perception/detected_object_validation/src/obstacle_pointcloud_based_validator.cpp @@ -304,6 +304,8 @@ ObstaclePointCloudBasedValidator::ObstaclePointCloudBasedValidator( objects_pub_ = create_publisher( "~/output/objects", rclcpp::QoS{1}); + debug_publisher_ = std::make_unique( + this, "obstacle_pointcloud_based_validator"); const bool enable_debugger = declare_parameter("enable_debugger", false); if (enable_debugger) debugger_ = std::make_shared(this); @@ -350,6 +352,14 @@ void ObstaclePointCloudBasedValidator::onObjectsAndObstaclePointCloud( debugger_->publishNeighborPointcloud(input_obstacle_pointcloud->header); 
debugger_->publishPointcloudWithinPolygon(input_obstacle_pointcloud->header); } + + // Publish processing time info + const double pipeline_latency = + std::chrono::duration( + std::chrono::nanoseconds((this->get_clock()->now() - output.header.stamp).nanoseconds())) + .count(); + debug_publisher_->publish( + "debug/pipeline_latency_ms", pipeline_latency); } } // namespace obstacle_pointcloud_based_validator diff --git a/perception/euclidean_cluster/src/euclidean_cluster_node.cpp b/perception/euclidean_cluster/src/euclidean_cluster_node.cpp index a890c19e25d06..746ef1bafb583 100644 --- a/perception/euclidean_cluster/src/euclidean_cluster_node.cpp +++ b/perception/euclidean_cluster/src/euclidean_cluster_node.cpp @@ -64,10 +64,7 @@ void EuclideanClusterNode::onPointCloud( cluster_pub_->publish(output); // build debug msg - if (debug_pub_->get_subscription_count() < 1) { - return; - } - { + if (debug_pub_->get_subscription_count() >= 1) { sensor_msgs::msg::PointCloud2 debug; convertObjectMsg2SensorMsg(output, debug); debug_pub_->publish(debug); @@ -75,10 +72,16 @@ void EuclideanClusterNode::onPointCloud( if (debug_publisher_) { const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); + const double pipeline_latency_ms = + std::chrono::duration( + std::chrono::nanoseconds((this->get_clock()->now() - output.header.stamp).nanoseconds())) + .count(); debug_publisher_->publish( "debug/cyclic_time_ms", cyclic_time_ms); debug_publisher_->publish( "debug/processing_time_ms", processing_time_ms); + debug_publisher_->publish( + "debug/pipeline_latency_ms", pipeline_latency_ms); } } diff --git a/perception/euclidean_cluster/src/voxel_grid_based_euclidean_cluster_node.cpp b/perception/euclidean_cluster/src/voxel_grid_based_euclidean_cluster_node.cpp index a28791e5d4057..f58d9ac6dcc48 100644 --- a/perception/euclidean_cluster/src/voxel_grid_based_euclidean_cluster_node.cpp +++ 
b/perception/euclidean_cluster/src/voxel_grid_based_euclidean_cluster_node.cpp @@ -76,10 +76,7 @@ void VoxelGridBasedEuclideanClusterNode::onPointCloud( cluster_pub_->publish(output); // build debug msg - if (debug_pub_->get_subscription_count() < 1) { - return; - } - { + if (debug_pub_->get_subscription_count() >= 1) { sensor_msgs::msg::PointCloud2 debug; convertObjectMsg2SensorMsg(output, debug); debug_pub_->publish(debug); @@ -87,10 +84,16 @@ void VoxelGridBasedEuclideanClusterNode::onPointCloud( if (debug_publisher_) { const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); + const double pipeline_latency_ms = + std::chrono::duration( + std::chrono::nanoseconds((this->get_clock()->now() - output.header.stamp).nanoseconds())) + .count(); debug_publisher_->publish( "debug/cyclic_time_ms", cyclic_time_ms); debug_publisher_->publish( "debug/processing_time_ms", processing_time_ms); + debug_publisher_->publish( + "debug/pipeline_latency_ms", pipeline_latency_ms); } } diff --git a/perception/front_vehicle_velocity_estimator/CMakeLists.txt b/perception/front_vehicle_velocity_estimator/CMakeLists.txt deleted file mode 100644 index 98094d029a316..0000000000000 --- a/perception/front_vehicle_velocity_estimator/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -cmake_minimum_required(VERSION 3.5) -project(front_vehicle_velocity_estimator) - -# Dependencies -find_package(autoware_cmake REQUIRED) -autoware_package() - -# Targets -ament_auto_add_library(front_vehicle_velocity_estimator_node_component SHARED - src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator_node.cpp - src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator.cpp -) - -rclcpp_components_register_node(front_vehicle_velocity_estimator_node_component - PLUGIN "front_vehicle_velocity_estimator::FrontVehicleVelocityEstimatorNode" - EXECUTABLE front_vehicle_velocity_estimator_node 
-) - -# Tests -if(BUILD_TESTING) - list(APPEND AMENT_LINT_AUTO_EXCLUDE ament_cmake_uncrustify) - find_package(ament_lint_auto REQUIRED) - ament_lint_auto_find_test_dependencies() -endif() - -# Package -ament_auto_package( - INSTALL_TO_SHARE - launch -) diff --git a/perception/front_vehicle_velocity_estimator/README.md b/perception/front_vehicle_velocity_estimator/README.md deleted file mode 100644 index 2225880db101c..0000000000000 --- a/perception/front_vehicle_velocity_estimator/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# front_vehicle_velocity_estimator - -This package contains a front vehicle velocity estimation for offline perception module analysis. -This package can: - -- Attach velocity to 3D detections from velocity estimation with LiDAR pointcloud. - -## Algorithm - -- Processing flow - 1. Choose front vehicle from front area objects. - 2. Choose nearest neighbor point within front vehicle. - 3. Estimate velocity of front vehicle by using the differentiated value from time series of nearest neighbor point positions. - 4. Compensate ego vehicle twist - -## Input - -| Name | Type | Description | -| -------------------- | ---------------------------------------------------- | -------------------- | -| `~/input/objects` | autoware_auto_perception_msgs/msg/DetectedObject.msg | 3D detected objects. | -| `~/input/pointcloud` | sensor_msgs/msg/PointCloud2.msg | LiDAR pointcloud. | -| `~/input/odometry` | nav_msgs::msg::Odometry.msg | Odometry data. | - -## Output - -| Name | Type | Description | -| ------------------------------------- | ----------------------------------------------------- | --------------------------------------------- | -| `~/output/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | 3D detected object with twist. | -| `~/debug/nearest_neighbor_pointcloud` | sensor_msgs/msg/PointCloud2.msg | The pointcloud msg of nearest neighbor point. 
| - -## Node parameter - -| Name | Type | Description | Default value | -| :--------------- | :----- | :-------------------- | :------------ | -| `update_rate_hz` | double | The update rate [hz]. | 10.0 | - -## Core parameter - -| Name | Type | Description | Default value | -| :---------------------------- | :----- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| `moving_average_num` | int | The moving average number for velocity estimation. | 1 | -| `threshold_pointcloud_z_high` | float | The threshold for z position value of point when choosing nearest neighbor point within front vehicle [m]. If z > `threshold_pointcloud_z_high`, the point is considered to noise. | 1.0f | -| `threshold_pointcloud_z_low` | float | The threshold for z position value of point when choosing nearest neighbor point within front vehicle [m]. If z < `threshold_pointcloud_z_low`, the point is considered to noise like ground. | 0.6f | -| `threshold_relative_velocity` | double | The threshold for min and max of estimated relative velocity ($v_{re}$) [m/s]. If $v_{re}$ < - `threshold_relative_velocity` , then $v_{re}$ = - `threshold_relative_velocity`. If $v_{re}$ > `threshold_relative_velocity`, then $v_{re}$ = `threshold_relative_velocity`. | 10.0 | -| `threshold_absolute_velocity` | double | The threshold for max of estimated absolute velocity ($v_{ae}$) [m/s]. If $v_{ae}$ > `threshold_absolute_velocity`, then $v_{ae}$ = `threshold_absolute_velocity`. 
| 20.0 | diff --git a/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator.hpp b/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator.hpp deleted file mode 100644 index 01f738fc6beaa..0000000000000 --- a/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator.hpp +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_HPP_ -#define FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_HPP_ - -#include "pcl/point_cloud.h" -#include "pcl/point_types.h" -#include "pcl_conversions/pcl_conversions.h" -#include "rclcpp/logger.hpp" -#include "tier4_autoware_utils/geometry/boost_geometry.hpp" - -#include "autoware_auto_perception_msgs/msg/detected_objects.hpp" -#include "nav_msgs/msg/odometry.hpp" -#include "sensor_msgs/msg/point_cloud2.hpp" - -#include -#include -#include - -namespace front_vehicle_velocity_estimator -{ -using autoware_auto_perception_msgs::msg::DetectedObject; -using autoware_auto_perception_msgs::msg::DetectedObjectKinematics; -using autoware_auto_perception_msgs::msg::DetectedObjects; -using autoware_auto_perception_msgs::msg::ObjectClassification; -using nav_msgs::msg::Odometry; -using sensor_msgs::msg::PointCloud2; -using tier4_autoware_utils::LinearRing2d; -using tier4_autoware_utils::Point2d; - -class FrontVehicleVelocityEstimator -{ -public: - explicit FrontVehicleVelocityEstimator(const rclcpp::Logger & logger) : logger_(logger) {} - - struct Input - { - PointCloud2::ConstSharedPtr pointcloud{}; - DetectedObjects::ConstSharedPtr objects{}; - Odometry::ConstSharedPtr odometry{}; - }; - - struct Output - { - DetectedObjects objects{}; - PointCloud2 nearest_neighbor_pointcloud{}; - }; - - struct Param - { - int moving_average_num{}; - float threshold_pointcloud_z_high{}; - float threshold_pointcloud_z_low{}; - double threshold_relative_velocity{}; - double threshold_absolute_velocity{}; - }; - - void setParam(const Param & param) { param_ = param; } - Output update(const Input & input); - -private: - struct ObjectsWithFrontVehicle - { - DetectedObjects::SharedPtr objects_without_front_vehicle{}; - DetectedObject front_vehicle{}; - bool is_front_vehicle = false; - }; - - rclcpp::Logger logger_; - - // Buffer data - Param param_{}; - std::deque velocity_queue_{}; - 
rclcpp::Time prev_time_{}; - pcl::PointXYZ prev_point_{}; - - // Function - LinearRing2d createBoxArea(const Point2d size); - LinearRing2d createObjectArea(const DetectedObject & object); - ObjectsWithFrontVehicle filterFrontVehicle( - DetectedObjects::ConstSharedPtr objects, const LinearRing2d & front_area); - pcl::PointXYZ getNearestNeighborPoint( - const DetectedObject & object, PointCloud2::ConstSharedPtr pointcloud, - const Point2d & front_size); - double estimateRelativeVelocity(const pcl::PointXYZ & point, const rclcpp::Time & header_time); - double estimateAbsoluteVelocity( - const double relative_velocity, Odometry::ConstSharedPtr odometry); - bool isFrontVehicle(const DetectedObject & object, const LinearRing2d & front_area); - bool isWithinVehicle( - const DetectedObject & object, const pcl::PointXYZ & point, const Point2d & front_size); -}; - -} // namespace front_vehicle_velocity_estimator - -#endif // FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_HPP_ diff --git a/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator_node.hpp b/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator_node.hpp deleted file mode 100644 index 8b42b61606677..0000000000000 --- a/perception/front_vehicle_velocity_estimator/include/front_vehicle_velocity_estimator/front_vehicle_velocity_estimator_node.hpp +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_NODE_HPP_ -#define FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_NODE_HPP_ - -#include "front_vehicle_velocity_estimator/front_vehicle_velocity_estimator.hpp" -#include "rclcpp/rclcpp.hpp" - -#include "autoware_auto_perception_msgs/msg/detected_objects.hpp" -#include "nav_msgs/msg/odometry.hpp" -#include "sensor_msgs/msg/point_cloud2.hpp" - -#include -#include -#include - -#include -#include -#include -#include - -namespace front_vehicle_velocity_estimator -{ -using autoware_auto_perception_msgs::msg::DetectedObject; -using autoware_auto_perception_msgs::msg::DetectedObjects; -using nav_msgs::msg::Odometry; -using sensor_msgs::msg::PointCloud2; -using tier4_autoware_utils::LinearRing2d; -using tier4_autoware_utils::Point2d; - -class FrontVehicleVelocityEstimatorNode : public rclcpp::Node -{ -public: - explicit FrontVehicleVelocityEstimatorNode(const rclcpp::NodeOptions & node_options); - - struct NodeParam - { - double update_rate_hz{}; - }; - -private: - // Subscriber - message_filters::Subscriber sub_pointcloud_{}; - message_filters::Subscriber sub_objects_{}; - message_filters::Subscriber sub_odometry_{}; - - using SyncPolicy = - message_filters::sync_policies::ApproximateTime; - using Sync = message_filters::Synchronizer; - typename std::shared_ptr sync_ptr_; - - // Callback - void onData( - const PointCloud2::ConstSharedPtr pointcloud_msg, - const DetectedObjects::ConstSharedPtr object_msg, const Odometry::ConstSharedPtr odometry_msg); - - // Data Buffer - PointCloud2::ConstSharedPtr pointcloud_data_{}; - DetectedObjects::ConstSharedPtr objects_data_{}; - Odometry::ConstSharedPtr odometry_data_{}; - - // Publisher - rclcpp::Publisher::SharedPtr pub_objects_{}; - rclcpp::Publisher::SharedPtr pub_nearest_neighbor_pointcloud_{}; - - // Timer - bool 
isDataReady(); - - // Parameter Server - OnSetParametersCallbackHandle::SharedPtr set_param_res_; - rcl_interfaces::msg::SetParametersResult onSetParam( - const std::vector & params); - - // Parameter - NodeParam node_param_{}; - - // Core - FrontVehicleVelocityEstimator::Input input_{}; - FrontVehicleVelocityEstimator::Output output_{}; - FrontVehicleVelocityEstimator::Param core_param_{}; - std::unique_ptr front_vehicle_velocity_estimator_{}; -}; - -} // namespace front_vehicle_velocity_estimator - -#endif // FRONT_VEHICLE_VELOCITY_ESTIMATOR__FRONT_VEHICLE_VELOCITY_ESTIMATOR_NODE_HPP_ diff --git a/perception/front_vehicle_velocity_estimator/launch/front_vehicle_velocity_estimator.launch.xml b/perception/front_vehicle_velocity_estimator/launch/front_vehicle_velocity_estimator.launch.xml deleted file mode 100644 index 518791821a0ac..0000000000000 --- a/perception/front_vehicle_velocity_estimator/launch/front_vehicle_velocity_estimator.launch.xml +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/perception/front_vehicle_velocity_estimator/package.xml b/perception/front_vehicle_velocity_estimator/package.xml deleted file mode 100644 index 3b1af8cf544ac..0000000000000 --- a/perception/front_vehicle_velocity_estimator/package.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - front_vehicle_velocity_estimator - 0.1.0 - front_vehicle_velocity_estimator - Satoshi Tanaka - Shunsuke Miura - Apache License 2.0 - Satoshi Tanaka - - ament_cmake_auto - autoware_cmake - - autoware_auto_perception_msgs - geometry_msgs - message_filters - nav_msgs - pcl_conversions - pcl_ros - rclcpp - rclcpp_components - sensor_msgs - tier4_autoware_utils - - ament_lint_auto - autoware_lint_common - - - ament_cmake - - diff --git a/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator.cpp 
b/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator.cpp deleted file mode 100644 index 6f59823615c04..0000000000000 --- a/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator.cpp +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "front_vehicle_velocity_estimator/front_vehicle_velocity_estimator.hpp" - -#include "tier4_autoware_utils/geometry/geometry.hpp" - -#include - -#include -#include -#include - -namespace front_vehicle_velocity_estimator -{ -FrontVehicleVelocityEstimator::Output FrontVehicleVelocityEstimator::update( - const FrontVehicleVelocityEstimator::Input & input) -{ - Output output_{}; - ObjectsWithFrontVehicle objects_with_front_vehicle{}; - - // Filter front vehicle - const Point2d front_size{100.0, 3.0}; - LinearRing2d front_area = createBoxArea(front_size); - objects_with_front_vehicle = filterFrontVehicle(input.objects, front_area); - - // Get nearest neighbor pointcloud - pcl::PointXYZ nearest_neighbor_point = - getNearestNeighborPoint(objects_with_front_vehicle.front_vehicle, input.pointcloud, front_size); - - // Set objects output - output_.objects = *(objects_with_front_vehicle.objects_without_front_vehicle); - - // If there is no front vehicle, return output - if (!objects_with_front_vehicle.is_front_vehicle) { - return output_; 
- } - - // Estimate relative velocity - double now_relative_velocity = - estimateRelativeVelocity(nearest_neighbor_point, input.pointcloud->header.stamp); - - // Estimate absolute velocity - double now_absolute_velocity = estimateAbsoluteVelocity(now_relative_velocity, input.odometry); - - // Validate velocity - if ( - !isfinite(now_relative_velocity) || - now_relative_velocity > param_.threshold_relative_velocity || - now_relative_velocity < -param_.threshold_relative_velocity) { - output_.objects.objects.emplace_back(objects_with_front_vehicle.front_vehicle); - return output_; - } - now_absolute_velocity = std::min(param_.threshold_absolute_velocity, now_absolute_velocity); - - // Set queue of nearest_neighbor_point - if (static_cast(velocity_queue_.size()) >= param_.moving_average_num) { - velocity_queue_.pop_front(); - } - - // Estimate average velocity - velocity_queue_.push_back(now_absolute_velocity); - double velocity = std::accumulate(std::begin(velocity_queue_), std::end(velocity_queue_), 0.0) / - velocity_queue_.size(); - // RCLCPP_INFO( - // rclcpp::get_logger("front_vehicle_velocity_estimator"), "x=%f, v=%f km/h, v_a=%f km/h", - // objects_with_front_vehicle.front_vehicle.kinematics.pose_with_covariance.pose.position.x, - // now_absolute_velocity * 3.6, velocity * 3.6); - - // Set kinematics information for front vehicle - objects_with_front_vehicle.front_vehicle.kinematics.has_twist = true; - objects_with_front_vehicle.front_vehicle.kinematics.twist_with_covariance.twist.linear.x = - velocity; - objects_with_front_vehicle.front_vehicle.kinematics.has_twist_covariance = true; - objects_with_front_vehicle.front_vehicle.kinematics.twist_with_covariance.covariance.at(0) = 1.0; - output_.objects.objects.emplace_back(objects_with_front_vehicle.front_vehicle); - - // Set previous data buffer - prev_time_ = input.pointcloud->header.stamp; - prev_point_ = nearest_neighbor_point; - - // Set nearest_neighbor_pointcloud output for debug - pcl::PointCloud 
output_pointcloud; - output_pointcloud.points.push_back(nearest_neighbor_point); - pcl::toROSMsg(output_pointcloud, output_.nearest_neighbor_pointcloud); - output_.nearest_neighbor_pointcloud.header = input.pointcloud->header; - - return output_; -} - -// Create front area x = 0 ~ size.x, y = -size.y ~ size.y -LinearRing2d FrontVehicleVelocityEstimator::createBoxArea(const Point2d size) -{ - LinearRing2d box{}; - box.push_back(Point2d{0.0, size.y()}); - box.push_back(Point2d{size.x(), size.y()}); - box.push_back(Point2d{size.x(), -size.y()}); - box.push_back(Point2d{0.0, -size.y()}); - box.push_back(Point2d{0.0, size.y()}); - return box; -} - -// Create object area -LinearRing2d FrontVehicleVelocityEstimator::createObjectArea(const DetectedObject & object) -{ - Point2d object_size{object.shape.dimensions.x, object.shape.dimensions.y}; - const double x_front = object_size.x() / 2.0; - const double x_rear = -object_size.x() / 2.0; - const double y_left = object_size.y() / 2.0; - const double y_right = -object_size.y() / 2.0; - - LinearRing2d box{}; - box.push_back(Point2d{x_front, y_left}); - box.push_back(Point2d{x_front, y_right}); - box.push_back(Point2d{x_rear, y_right}); - box.push_back(Point2d{x_rear, y_left}); - box.push_back(Point2d{x_front, y_left}); - - LinearRing2d transformed_box = tier4_autoware_utils::transformVector( - box, tier4_autoware_utils::pose2transform(object.kinematics.pose_with_covariance.pose)); - - return transformed_box; -} - -// Filter for a front vehicle. 
-FrontVehicleVelocityEstimator::ObjectsWithFrontVehicle -FrontVehicleVelocityEstimator::filterFrontVehicle( - DetectedObjects::ConstSharedPtr objects, const LinearRing2d & front_area) -{ - // Initialize output - ObjectsWithFrontVehicle output{}; - output.is_front_vehicle = false; - - DetectedObjects output_objects_{}; - output_objects_.header = objects->header; - - for (const auto & object : objects->objects) { - if (!isFrontVehicle(object, front_area)) { - output_objects_.objects.emplace_back(object); - } else if (!output.is_front_vehicle) { - output.front_vehicle = object; - output.is_front_vehicle = true; - } else { - auto front_vehicle_position = - output.front_vehicle.kinematics.pose_with_covariance.pose.position; - if (object.kinematics.pose_with_covariance.pose.position.x < front_vehicle_position.x) { - output_objects_.objects.emplace_back(output.front_vehicle); - output.front_vehicle = object; - } else { - output_objects_.objects.emplace_back(object); - } - } - } - output.objects_without_front_vehicle = std::make_shared(output_objects_); - return output; -} - -pcl::PointXYZ FrontVehicleVelocityEstimator::getNearestNeighborPoint( - const DetectedObject & object, PointCloud2::ConstSharedPtr pointcloud, const Point2d & front_size) -{ - pcl::PointCloud::Ptr pcl_msg(new pcl::PointCloud); - pcl::fromROSMsg(*pointcloud, *pcl_msg); - - // Initialize - pcl::PointXYZ nearest_neighbor_point; - bool is_initialized = false; - - for (const auto & point : *pcl_msg) { - if (isWithinVehicle(object, point, front_size)) { - if (!is_initialized) { - nearest_neighbor_point = point; - } else if (point.x < nearest_neighbor_point.x) { - nearest_neighbor_point = point; - } - } - } - return nearest_neighbor_point; -} - -bool FrontVehicleVelocityEstimator::isWithinVehicle( - const DetectedObject & object, const pcl::PointXYZ & point, const Point2d & front_size) -{ - LinearRing2d object_ring_2d = createObjectArea(object); - Point2d point_{point.x, point.y}; - if (point.z < 
param_.threshold_pointcloud_z_low) { - return false; - } else if (point.z > param_.threshold_pointcloud_z_high) { - return false; - } else if (point.x < 0) { - return false; - } else if (point.x > front_size.x()) { - return false; - } else if (point.y < -front_size.y()) { - return false; - } else if (point.y > front_size.y()) { - return false; - } else if (!boost::geometry::within(point_, object_ring_2d)) { - return false; - } - return true; -} - -double FrontVehicleVelocityEstimator::estimateRelativeVelocity( - const pcl::PointXYZ & point, const rclcpp::Time & header_time) -{ - if (velocity_queue_.size() == 0) { - return 0.0; - } - const double dt = (header_time - prev_time_).seconds(); - const double relative_velocity = (point.x - prev_point_.x) / dt; - return relative_velocity; -} - -double FrontVehicleVelocityEstimator::estimateAbsoluteVelocity( - const double relative_velocity, Odometry::ConstSharedPtr odometry) -{ - const double velocity = relative_velocity + odometry->twist.twist.linear.x; - return velocity; -} - -bool FrontVehicleVelocityEstimator::isFrontVehicle( - const DetectedObject & object, const LinearRing2d & front_area) -{ - auto position = object.kinematics.pose_with_covariance.pose.position; - auto label = object.classification.at(0).label; - Point2d object_point{position.x, position.y}; - if ( - !(label == ObjectClassification::UNKNOWN) && !(label == ObjectClassification::PEDESTRIAN) && - !(label == ObjectClassification::BICYCLE) && !(label == ObjectClassification::MOTORCYCLE) && - boost::geometry::within(object_point, front_area)) { - return true; - } else { - return false; - } -} - -} // namespace front_vehicle_velocity_estimator diff --git a/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator_node.cpp b/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator_node.cpp deleted file mode 100644 index 
d79164bf54e0d..0000000000000 --- a/perception/front_vehicle_velocity_estimator/src/front_vehicle_velocity_estimator_node/front_vehicle_velocity_estimator_node.cpp +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "front_vehicle_velocity_estimator/front_vehicle_velocity_estimator_node.hpp" - -#include "rclcpp/rclcpp.hpp" - -#include -#include -#include - -using namespace std::literals; -using std::chrono::duration; -using std::chrono::duration_cast; -using std::chrono::nanoseconds; - -namespace -{ -template -bool update_param( - const std::vector & params, const std::string & name, T & value) -{ - const auto itr = std::find_if( - params.cbegin(), params.cend(), - [&name](const rclcpp::Parameter & p) { return p.get_name() == name; }); - - // Not found - if (itr == params.cend()) { - return false; - } - - value = itr->template get_value(); - return true; -} -} // namespace - -namespace front_vehicle_velocity_estimator -{ - -FrontVehicleVelocityEstimatorNode::FrontVehicleVelocityEstimatorNode( - const rclcpp::NodeOptions & node_options) -: Node("front_vehicle_velocity_estimator", node_options) -{ - // Parameter Server - set_param_res_ = this->add_on_set_parameters_callback( - std::bind(&FrontVehicleVelocityEstimatorNode::onSetParam, this, std::placeholders::_1)); - - // Node Parameter - node_param_.update_rate_hz = declare_parameter("node_params.update_rate_hz", 10.0); - - // Core 
Parameter - core_param_.moving_average_num = declare_parameter("core_params.moving_average_num", 1); - core_param_.threshold_pointcloud_z_high = - declare_parameter("core_params.threshold_pointcloud_z_high", 1.0f); - core_param_.threshold_pointcloud_z_low = - declare_parameter("core_params.threshold_pointcloud_z_low", 0.6f); - core_param_.threshold_relative_velocity = - declare_parameter("core_params.threshold_relative_velocity", 10.0); - core_param_.threshold_absolute_velocity = - declare_parameter("core_params.threshold_absolute_velocity", 20.0); - - // Core - front_vehicle_velocity_estimator_ = std::make_unique(get_logger()); - front_vehicle_velocity_estimator_->setParam(core_param_); - - // Subscriber - sub_pointcloud_.subscribe( - this, "~/input/pointcloud", rclcpp::SensorDataQoS().get_rmw_qos_profile()); - sub_objects_.subscribe(this, "~/input/objects", rclcpp::QoS{1}.get_rmw_qos_profile()); - sub_odometry_.subscribe(this, "~/input/odometry", rclcpp::QoS{1}.get_rmw_qos_profile()); - - using std::placeholders::_1; - using std::placeholders::_2; - using std::placeholders::_3; - sync_ptr_ = std::make_shared(SyncPolicy(20), sub_pointcloud_, sub_objects_, sub_odometry_); - sync_ptr_->registerCallback( - std::bind(&FrontVehicleVelocityEstimatorNode::onData, this, _1, _2, _3)); - - // Publisher - pub_objects_ = create_publisher("~/output/objects", 1); - pub_nearest_neighbor_pointcloud_ = - create_publisher("~/debug/nearest_neighbor_pointcloud", 1); -} - -void FrontVehicleVelocityEstimatorNode::onData( - const PointCloud2::ConstSharedPtr pointcloud_msg, - const DetectedObjects::ConstSharedPtr object_msg, const Odometry::ConstSharedPtr odometry_msg) -{ - pointcloud_data_ = pointcloud_msg; - objects_data_ = object_msg; - odometry_data_ = odometry_msg; - - if (!isDataReady()) { - return; - } - - if (!odometry_data_) { - // If odometry data does not come, publish original objects - pub_objects_->publish(*objects_data_); - } else { - // Set input data - input_.objects = 
objects_data_; - input_.pointcloud = pointcloud_data_; - input_.odometry = odometry_data_; - - // Update - output_ = front_vehicle_velocity_estimator_->update(input_); - - // Publish - pub_objects_->publish(output_.objects); - pub_nearest_neighbor_pointcloud_->publish(output_.nearest_neighbor_pointcloud); - } -} - -rcl_interfaces::msg::SetParametersResult FrontVehicleVelocityEstimatorNode::onSetParam( - const std::vector & params) -{ - rcl_interfaces::msg::SetParametersResult result; - - try { - // Node Parameter - { - auto & p = node_param_; - - // Update params - update_param(params, "node_params.update_rate_hz", p.update_rate_hz); - } - - // Core Parameter - { - auto & p = core_param_; - - // Update params - update_param(params, "core_params.moving_average_num", p.moving_average_num); - update_param( - params, "core_params.threshold_pointcloud_z_high", p.threshold_pointcloud_z_high); - update_param(params, "core_params.threshold_pointcloud_z_low", p.threshold_pointcloud_z_low); - update_param( - params, "core_params.threshold_relative_velocity", p.threshold_relative_velocity); - update_param( - params, "core_params.threshold_absolute_velocity", p.threshold_absolute_velocity); - - // Set parameter to instance - if (front_vehicle_velocity_estimator_) { - front_vehicle_velocity_estimator_->setParam(core_param_); - } - } - } catch (const rclcpp::exceptions::InvalidParameterTypeException & e) { - result.successful = false; - result.reason = e.what(); - return result; - } - - result.successful = true; - result.reason = "success"; - return result; -} - -bool FrontVehicleVelocityEstimatorNode::isDataReady() -{ - if (!objects_data_) { - RCLCPP_INFO_THROTTLE(get_logger(), *get_clock(), 1000, "waiting for objects msg..."); - return false; - } - if (!pointcloud_data_) { - RCLCPP_INFO_THROTTLE(get_logger(), *get_clock(), 1000, "waiting for pointcloud msg..."); - return false; - } - return true; -} -} // namespace front_vehicle_velocity_estimator - -#include 
"rclcpp_components/register_node_macro.hpp" -RCLCPP_COMPONENTS_REGISTER_NODE(front_vehicle_velocity_estimator::FrontVehicleVelocityEstimatorNode) diff --git a/perception/heatmap_visualizer/CMakeLists.txt b/perception/heatmap_visualizer/CMakeLists.txt deleted file mode 100644 index 75fa02a1cdcf7..0000000000000 --- a/perception/heatmap_visualizer/CMakeLists.txt +++ /dev/null @@ -1,20 +0,0 @@ -cmake_minimum_required(VERSION 3.14) -project(heatmap_visualizer) - -find_package(autoware_cmake REQUIRED) -autoware_package() - -ament_auto_add_library(heatmap_visualizer_component SHARED - src/heatmap_visualizer_node.cpp - src/utils.cpp -) - -rclcpp_components_register_node(heatmap_visualizer_component - PLUGIN "heatmap_visualizer::HeatmapVisualizerNode" - EXECUTABLE heatmap_visualizer -) - -ament_auto_package(INSTALL_TO_SHARE - launch - config -) diff --git a/perception/heatmap_visualizer/README.md b/perception/heatmap_visualizer/README.md deleted file mode 100644 index 747a158b3d487..0000000000000 --- a/perception/heatmap_visualizer/README.md +++ /dev/null @@ -1,92 +0,0 @@ -# heatmap_visualizer - -## Purpose - -heatmap_visualizer is a package for visualizing heatmap of detected 3D objects' positions on the BEV space. - -This package is used for qualitative evaluation and trend analysis of the detector, it means, for instance, the heatmap shows "This detector performs good for near around of our vehicle, but far is bad". - -### How to run - -```shell -ros2 launch heatmap_visualizer heatmap_visualizer.launch.xml input/objects:= -``` - -## Inner-workings / Algorithms - -In this implementation, create heatmap of the center position of detected objects for each classes, for instance, CAR, PEDESTRIAN, etc, and publish them as occupancy grid maps. - -![heatmap_visualizer_sample](./image/heatmap_sample.png) - -In the above figure, the pink represents high detection frequency area and blue one is low, or black represents there is no detection. 
- -![heatmap_visualizer](./image/heatmap_visualizer.png) - -As inner-workings, add center positions of detected objects to index of each corresponding grid map cell in a buffer. -The created heatmap will be published by each specific frame, which can be specified with `frame_count`. Note that the buffer to be add the positions is not reset per publishing. -When publishing, firstly these values are normalized to [0, 1] using maximum and minimum values in the buffer. Secondly, they are scaled to integer in [0, 100] because `nav_msgs::msg::OccupancyGrid` only allow the value in [0, 100]. - -## Inputs / Outputs - -### Input - -| Name | Type | Description | -| ----------------- | ----------------------------------------------------- | ---------------- | -| `~/input/objects` | `autoware_auto_perception_msgs::msg::DetectedObjects` | detected objects | - -### Output - -| Name | Type | Description | -| ------------------------------- | ------------------------------ | ------------------ | -| `~/output/objects/` | `nav_msgs::msg::OccupancyGrid` | visualized heatmap | - -## Parameters - -### Core Parameters - -| Name | Type | Default Value | Description | -| --------------------- | ------------- | ------------------------------------------------------------------------------------- | ----------------------------------------------- | -| `publish_frame_count` | int | `50` | The number of frames to publish heatmap | -| `heatmap_frame_id` | string | `base_link` | The frame ID of heatmap to be respected | -| `heatmap_length` | float | `200.0` | A length of map in meter | -| `heatmap_resolution` | float | `0.8` | A resolution of map | -| `use_confidence` | bool | `false` | A flag if use confidence score as heatmap value | -| `class_names` | array | `["UNKNOWN", "CAR", "TRUCK", "BUS", "TRAILER", "BICYCLE", "MOTORBIKE", "PEDESTRIAN"]` | An array of class names to be published | -| `rename_to_car` | bool | `true` | A flag if rename car like vehicle to car | - -## Assumptions / Known 
limits - -The heatmap depends on the data to be used, so if the objects in data are sparse the heatmap will be sparse. - -## (Optional) Error detection and handling - - - -## (Optional) Performance characterization - - - -## References/External links - -## (Optional) Future extensions / Unimplemented parts - - diff --git a/perception/heatmap_visualizer/config/heatmap_visualizer.param.yaml b/perception/heatmap_visualizer/config/heatmap_visualizer.param.yaml deleted file mode 100644 index 2da863ea5e37a..0000000000000 --- a/perception/heatmap_visualizer/config/heatmap_visualizer.param.yaml +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2023 TIER IV, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -/**: - ros__parameters: - publish_frame_count: 50 - heatmap_frame_id: "base_link" - heatmap_length: 200.0 - heatmap_resolution: 0.8 - use_confidence: false - class_names: - [ - "UNKNOWN", - "CAR", - "TRUCK", - "BUS", - "TRAILER", - "BICYCLE", - "MOTORBIKE", - "PEDESTRIAN", - ] - rename_to_car: false diff --git a/perception/heatmap_visualizer/image/heatmap_sample.png b/perception/heatmap_visualizer/image/heatmap_sample.png deleted file mode 100644 index 42a0c8eab4a98..0000000000000 Binary files a/perception/heatmap_visualizer/image/heatmap_sample.png and /dev/null differ diff --git a/perception/heatmap_visualizer/image/heatmap_visualizer.png b/perception/heatmap_visualizer/image/heatmap_visualizer.png deleted file mode 100644 index cf43ddedeb6ce..0000000000000 Binary files a/perception/heatmap_visualizer/image/heatmap_visualizer.png and /dev/null differ diff --git a/perception/heatmap_visualizer/include/heatmap_visualizer/heatmap_visualizer_node.hpp b/perception/heatmap_visualizer/include/heatmap_visualizer/heatmap_visualizer_node.hpp deleted file mode 100644 index 508ce238b51f1..0000000000000 --- a/perception/heatmap_visualizer/include/heatmap_visualizer/heatmap_visualizer_node.hpp +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef HEATMAP_VISUALIZER__HEATMAP_VISUALIZER_NODE_HPP_ -#define HEATMAP_VISUALIZER__HEATMAP_VISUALIZER_NODE_HPP_ - -#include - -#include -#include -#include - -#include -#include -#include - -namespace heatmap_visualizer -{ -class HeatmapVisualizerNode : public rclcpp::Node -{ -public: - /** - * @brief Construct a new Heatmap Visualizer Node object - * - * @param node_options - */ - explicit HeatmapVisualizerNode(const rclcpp::NodeOptions & node_options); - -private: - /** - * @brief Callback function - * - * @param msg - */ - void detectedObjectsCallback( - const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr msg); - - // Subscriber - rclcpp::Subscription::SharedPtr objects_sub_; - - // Publishers - std::map::SharedPtr> heatmap_pubs_; - - std::map heatmaps_; - - uint32_t frame_count_; - - // ROS params - uint32_t total_frame_; - std::string target_frame_; - std::string src_frame_; - std::string map_frame_; - float map_length_; - float map_resolution_; - bool use_confidence_; - std::vector class_names_{"UNKNOWN", "CAR", "TRUCK", "BUS", - "TRAILER", "BICYCLE", "MOTORBIKE", "PEDESTRIAN"}; - bool rename_car_to_truck_and_bus_; - - // Number of width and height cells - uint32_t width_; - uint32_t height_; - std::map> data_buffers_; -}; // class HeatmapVisualizerNode - -} // namespace heatmap_visualizer - -#endif // HEATMAP_VISUALIZER__HEATMAP_VISUALIZER_NODE_HPP_ diff --git a/perception/heatmap_visualizer/include/heatmap_visualizer/utils.hpp b/perception/heatmap_visualizer/include/heatmap_visualizer/utils.hpp deleted file mode 100644 index cd0c833e2dea8..0000000000000 --- a/perception/heatmap_visualizer/include/heatmap_visualizer/utils.hpp +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef HEATMAP_VISUALIZER__UTILS_HPP_ -#define HEATMAP_VISUALIZER__UTILS_HPP_ - -#include -#include -#include -#include -#include - -#ifdef ROS_DISTRO_GALACTIC -#include -#else -#include -#endif - -#include -#include - -namespace heatmap_visualizer -{ -using Label = autoware_auto_perception_msgs::msg::ObjectClassification; - -/** - * @brief IndexXYT object - * - */ -struct IndexXYT -{ - int x; - int y; - int theta; -}; // struct IndexXYT - -/** - * @brief - * - * @param theta - * @param theta_size - * @return int - */ -int discretizeAngle(const double theta, const int theta_size); - -/** - * @brief - * - * @param costmap - * @param pose_local - * @param theta_size - * @return IndexXYT - */ -IndexXYT pose2index( - const nav_msgs::msg::OccupancyGrid & costmap, const geometry_msgs::msg::Pose & pose_local, - const int theta_size); - -/** - * @brief Set confidence score or 1 to box position to data buffer. 
- * - * @param obj - * @param heatmap - * @param data_buffer - * @param use_confidence - */ -void setHeatmapToBuffer( - const autoware_auto_perception_msgs::msg::DetectedObject & obj, - const nav_msgs::msg::OccupancyGrid & heatmap, std::vector * data_buffer, - const bool use_confidence); - -/** - * @brief Set the Heatmap To Occupancy Grid object - * - * @param data_buffer - * @param heatmap - */ -void setHeatmapToOccupancyGrid( - const std::vector & data_buffer, nav_msgs::msg::OccupancyGrid * heatmap); - -/** - * @brief Get the Semantic Type object - * - * @param class_name - * @return uint8_t - */ -uint8_t getSemanticType(const std::string & class_name); - -/** - * @brief Get the Class Name object - * - * @param label - * @return std::string - */ -std::string getClassName(const uint8_t label); - -} // namespace heatmap_visualizer - -#endif // HEATMAP_VISUALIZER__UTILS_HPP_ diff --git a/perception/heatmap_visualizer/launch/heatmap_visualizer.launch.xml b/perception/heatmap_visualizer/launch/heatmap_visualizer.launch.xml deleted file mode 100644 index 7251d776b6110..0000000000000 --- a/perception/heatmap_visualizer/launch/heatmap_visualizer.launch.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/perception/heatmap_visualizer/package.xml b/perception/heatmap_visualizer/package.xml deleted file mode 100644 index ab75738cbd008..0000000000000 --- a/perception/heatmap_visualizer/package.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - heatmap_visualizer - 1.0.0 - The heatmap_visualizer package - Kotaro Uetake - Apache License 2.0 - - Kotaro Uetake - - ament_cmake_auto - autoware_cmake - - autoware_auto_perception_msgs - geometry_msgs - nav_msgs - object_recognition_utils - rclcpp - rclcpp_components - tf2 - tf2_geometry_msgs - tier4_autoware_utils - - ament_lint_auto - autoware_lint_common - - - ament_cmake - - diff --git a/perception/heatmap_visualizer/schema/heatmap_visualizer.schema.json 
b/perception/heatmap_visualizer/schema/heatmap_visualizer.schema.json deleted file mode 100644 index cb67ae383c35a..0000000000000 --- a/perception/heatmap_visualizer/schema/heatmap_visualizer.schema.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Parameters for Heatmap Visualizer Node", - "type": "object", - "definitions": { - "heatmap_visualizer": { - "type": "object", - "properties": { - "publish_frame_count": { - "type": "integer", - "description": "The number of frames to be published at once.", - "default": 50, - "minimum": 1 - }, - "heatmap_frame_id": { - "type": "string", - "description": "A frame ID in which visualized heatmap is with respect to.", - "default": "base_link" - }, - "heatmap_length": { - "type": "number", - "description": "A size length of heatmap [m].", - "default": 200.0, - "exclusiveMinimum": 0.0 - }, - "heatmap_resolution": { - "type": "number", - "description": "A cell resolution of heatmap [m].", - "default": 0.8, - "exclusiveMinimum": 0.0 - }, - "use_confidence": { - "type": "boolean", - "description": "Indicates whether to use object existence probability as score. 
It this parameter is false, 1.0 is set to score.", - "default": false - }, - "class_names": { - "type": "array", - "description": "An array of object class names.", - "default": [ - "UNKNOWN", - "CAR", - "TRUCK", - "BUS", - "TRAILER", - "BICYCLE", - "MOTORBIKE", - "PEDESTRIAN" - ], - "uniqueItems": true - }, - "rename_to_car": { - "type": "boolean", - "description": "Indicates whether to rename car like vehicle into car.", - "default": false - } - }, - "required": [ - "publish_frame_count", - "heatmap_frame_id", - "heatmap_length", - "heatmap_resolution", - "use_confidence", - "class_names", - "rename_to_car" - ] - } - }, - "properties": { - "/**": { - "type": "object", - "properties": { - "ros__parameters": { - "$ref": "#/definitions/heatmap_visualizer" - } - }, - "required": ["ros__parameters"] - } - }, - "required": ["/**"] -} diff --git a/perception/heatmap_visualizer/src/heatmap_visualizer_node.cpp b/perception/heatmap_visualizer/src/heatmap_visualizer_node.cpp deleted file mode 100644 index d9337e0c9f61d..0000000000000 --- a/perception/heatmap_visualizer/src/heatmap_visualizer_node.cpp +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "heatmap_visualizer/heatmap_visualizer_node.hpp" - -#include "heatmap_visualizer/utils.hpp" -#include "object_recognition_utils/object_recognition_utils.hpp" - -#include - -#include -#include - -namespace heatmap_visualizer -{ -HeatmapVisualizerNode::HeatmapVisualizerNode(const rclcpp::NodeOptions & node_options) -: Node("heatmap_visualizer", node_options), frame_count_(0) -{ - total_frame_ = static_cast(declare_parameter("publish_frame_count")); - map_frame_ = declare_parameter("heatmap_frame_id"); - map_length_ = static_cast(declare_parameter("heatmap_length")); - map_resolution_ = static_cast(declare_parameter("heatmap_resolution")); - use_confidence_ = declare_parameter("use_confidence"); - class_names_ = declare_parameter>("class_names"); - rename_car_to_truck_and_bus_ = declare_parameter("rename_to_car"); - - width_ = static_cast(map_length_ / map_resolution_); - height_ = static_cast(map_length_ / map_resolution_); - - objects_sub_ = create_subscription( - "/heatmap_visualizer/input/objects", 1, - std::bind(&HeatmapVisualizerNode::detectedObjectsCallback, this, std::placeholders::_1)); - - for (std::string & key : class_names_) { - nav_msgs::msg::OccupancyGrid heatmap; - heatmap.header.frame_id = map_frame_; - heatmap.info.width = width_; - heatmap.info.height = height_; - heatmap.info.resolution = map_resolution_; - heatmap.info.origin.position.x = -map_length_ / 2; - heatmap.info.origin.position.y = -map_length_ / 2; - heatmap.data.resize(width_ * height_, 0); - - std::vector buffer; - buffer.resize(width_ * height_, 0); - - uint8_t label = getSemanticType(key); - bool is_car_like_vehicle = object_recognition_utils::isCarLikeVehicle(label); - if ((!rename_car_to_truck_and_bus_) && (is_car_like_vehicle)) { - uint8_t car_label = getSemanticType("CAR"); - heatmaps_.insert(std::make_pair(car_label, heatmap)); - data_buffers_.insert(std::make_pair(car_label, buffer)); - - heatmap_pubs_.insert(std::make_pair( - car_label, 
create_publisher("~/output/heatmap/CAR", 10.0))); - } else { - heatmaps_.insert(std::make_pair(label, heatmap)); - data_buffers_.insert(std::make_pair(label, buffer)); - - heatmap_pubs_.insert(std::make_pair( - label, create_publisher("~/output/heatmap/" + key, 10.0))); - } - } -} - -void HeatmapVisualizerNode::detectedObjectsCallback( - const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr msg) -{ - frame_count_ += 1; - for (const auto & obj : msg->objects) { - uint8_t label = obj.classification[0].label; - bool is_car_like_vehicle = object_recognition_utils::isCarLikeVehicle(label); - if ((!rename_car_to_truck_and_bus_) && (is_car_like_vehicle)) { - label = getSemanticType("CAR"); - } - // Set value to data buffer - setHeatmapToBuffer(obj, heatmaps_.at(label), &data_buffers_.at(label), use_confidence_); - } - // Publish messages if frame_count_ == total_frame_ - if (frame_count_ == total_frame_) { - for (auto & map : heatmaps_) { - setHeatmapToOccupancyGrid(data_buffers_.at(map.first), &map.second); - heatmap_pubs_.at(map.first)->publish(map.second); - } - // Reset frame count - frame_count_ = 0; - } -} - -} // namespace heatmap_visualizer - -RCLCPP_COMPONENTS_REGISTER_NODE(heatmap_visualizer::HeatmapVisualizerNode) diff --git a/perception/heatmap_visualizer/src/utils.cpp b/perception/heatmap_visualizer/src/utils.cpp deleted file mode 100644 index c4b1c3d6a22c6..0000000000000 --- a/perception/heatmap_visualizer/src/utils.cpp +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#include "heatmap_visualizer/utils.hpp" - -#include -#include -#include - -#include - -#include -#include - -namespace heatmap_visualizer -{ -using tier4_autoware_utils::normalizeRadian; - -int discretizeAngle(const double theta, const int theta_size) -{ - const double one_angle_range = 2.0 * M_PI / theta_size; - return static_cast(std::rint(normalizeRadian(theta, 0.0) / one_angle_range)) % theta_size; -} - -IndexXYT pose2index( - const nav_msgs::msg::OccupancyGrid & costmap, const geometry_msgs::msg::Pose & pose_local, - const int theta_size) -{ - const int index_x = pose_local.position.x / costmap.info.resolution; - const int index_y = pose_local.position.y / costmap.info.resolution; - const double theta = tf2::getYaw(pose_local.orientation); - const int index_theta = discretizeAngle(theta, theta_size); - return {index_x, index_y, index_theta}; -} - -void setHeatmapToBuffer( - const autoware_auto_perception_msgs::msg::DetectedObject & obj, - const nav_msgs::msg::OccupancyGrid & heatmap, std::vector * data_buffer, - const bool use_confidence) -{ - int theta_size = 48; - IndexXYT indexPose{pose2index(heatmap, obj.kinematics.pose_with_covariance.pose, theta_size)}; - int mapWidth = heatmap.info.width; - int mapHeight = heatmap.info.height; - int indexX = indexPose.x + mapWidth / 2; - int indexY = indexPose.y + mapHeight / 2; - - float score; - if (use_confidence) { - score = obj.existence_probability; - } else { - score = 1.0; - } - - try { - data_buffer->at(indexY * mapWidth + indexX) += score; - } catch (const std::out_of_range & e) { - RCLCPP_ERROR(rclcpp::get_logger("setHeatmapToBuffer"), e.what()); - } -} - -void setHeatmapToOccupancyGrid( - const std::vector & data_buffer, nav_msgs::msg::OccupancyGrid * heatmap) -{ - float max_value = *std::max_element(data_buffer.begin(), data_buffer.end()); - float min_value = *std::min_element(data_buffer.begin(), 
data_buffer.end()); - if (max_value == min_value) { - return; - } - - for (size_t i = 0; i < heatmap->data.size(); ++i) { - heatmap->data[i] = - static_cast(100 * (data_buffer[i] - min_value) / (max_value - min_value)); - } - return; -} - -uint8_t getSemanticType(const std::string & class_name) -{ - if (class_name == "CAR") { - return Label::CAR; - } else if (class_name == "TRUCK") { - return Label::TRUCK; - } else if (class_name == "BUS") { - return Label::BUS; - } else if (class_name == "TRAILER") { - return Label::TRAILER; - } else if (class_name == "BICYCLE") { - return Label::BICYCLE; - } else if (class_name == "MOTORBIKE") { - return Label::MOTORCYCLE; - } else if (class_name == "PEDESTRIAN") { - return Label::PEDESTRIAN; - } else { - return Label::UNKNOWN; - } -} - -std::string getClassName(const uint8_t label) -{ - if (label == Label::CAR) { - return "CAR"; - } else if (label == Label::TRUCK) { - return "TRUCK"; - } else if (label == Label::BUS) { - return "BUS"; - } else if (label == Label::TRAILER) { - return "TRAILER"; - } else if (label == Label::BICYCLE) { - return "BICYCLE"; - } else if (label == Label::MOTORCYCLE) { - return "MOTORBIKE"; - } else if (label == Label::PEDESTRIAN) { - return "PEDESTRIAN"; - } else { - return "UNKNOWN"; - } -} - -} // namespace heatmap_visualizer diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml index f336eb16e94a1..9236d7d789ed1 100644 --- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml +++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml @@ -52,7 +52,7 @@ - + @@ -93,7 +93,7 @@ - + diff --git a/perception/image_projection_based_fusion/package.xml b/perception/image_projection_based_fusion/package.xml index 1322d053b358a..4457cf3d3992d 100644 --- a/perception/image_projection_based_fusion/package.xml +++ 
b/perception/image_projection_based_fusion/package.xml @@ -10,6 +10,7 @@ badai nguyen Kotaro Uetake Tao Zhong + Koji Minoda Apache License 2.0 ament_cmake_auto diff --git a/perception/image_projection_based_fusion/src/fusion_node.cpp b/perception/image_projection_based_fusion/src/fusion_node.cpp index f06ff96abe257..febec4dbc20b8 100644 --- a/perception/image_projection_based_fusion/src/fusion_node.cpp +++ b/perception/image_projection_based_fusion/src/fusion_node.cpp @@ -132,7 +132,7 @@ FusionNode::FusionNode( using tier4_autoware_utils::DebugPublisher; using tier4_autoware_utils::StopWatch; stop_watch_ptr_ = std::make_unique>(); - debug_publisher_ = std::make_unique(this, "image_projection_based_fusion"); + debug_publisher_ = std::make_unique(this, get_name()); stop_watch_ptr_->tic("cyclic_time"); stop_watch_ptr_->tic("processing_time"); } @@ -167,19 +167,27 @@ void FusionNode::subCallback(const typename Msg::ConstSharedPtr timer_->cancel(); postprocess(*(cached_msg_.second)); publish(*(cached_msg_.second)); - cached_msg_.second = nullptr; std::fill(is_fused_.begin(), is_fused_.end(), false); // add processing time for debug if (debug_publisher_) { const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); + const double pipeline_latency_ms = + std::chrono::duration( + std::chrono::nanoseconds( + (this->get_clock()->now() - cached_msg_.second->header.stamp).nanoseconds())) + .count(); debug_publisher_->publish( "debug/cyclic_time_ms", cyclic_time_ms); debug_publisher_->publish( "debug/processing_time_ms", processing_time_ms + stop_watch_ptr_->toc("processing_time", true)); + debug_publisher_->publish( + "debug/pipeline_latency_ms", pipeline_latency_ms); processing_time_ms = 0; } + + cached_msg_.second = nullptr; } std::lock_guard lock(mutex_cached_msgs_); diff --git a/perception/lidar_apollo_instance_segmentation/README.md b/perception/lidar_apollo_instance_segmentation/README.md index 2d86b695d2474..f9dc6b3f44a23 100644 --- 
a/perception/lidar_apollo_instance_segmentation/README.md +++ b/perception/lidar_apollo_instance_segmentation/README.md @@ -47,6 +47,7 @@ None | `use_constant_feature` | bool | false | The flag to use direction and distance feature of pointcloud. | | `target_frame` | string | "base_link" | Pointcloud data is transformed into this frame. | | `z_offset` | int | 2 | z offset from target frame. [m] | +| `build_only` | bool | `false` | shutdown the node after TensorRT engine file is built | ## Assumptions / Known limits diff --git a/perception/lidar_apollo_instance_segmentation/launch/lidar_apollo_instance_segmentation.launch.xml b/perception/lidar_apollo_instance_segmentation/launch/lidar_apollo_instance_segmentation.launch.xml index 7f5024929b531..207dac45671dd 100644 --- a/perception/lidar_apollo_instance_segmentation/launch/lidar_apollo_instance_segmentation.launch.xml +++ b/perception/lidar_apollo_instance_segmentation/launch/lidar_apollo_instance_segmentation.launch.xml @@ -2,6 +2,7 @@ + @@ -24,5 +25,8 @@ + + + diff --git a/perception/lidar_apollo_instance_segmentation/src/detector.cpp b/perception/lidar_apollo_instance_segmentation/src/detector.cpp index 5da7825f96681..236f0998ab21f 100644 --- a/perception/lidar_apollo_instance_segmentation/src/detector.cpp +++ b/perception/lidar_apollo_instance_segmentation/src/detector.cpp @@ -48,6 +48,11 @@ LidarApolloInstanceSegmentation::LidarApolloInstanceSegmentation(rclcpp::Node * return; } + if (node_->declare_parameter("build_only", false)) { + RCLCPP_INFO(node_->get_logger(), "TensorRT engine is built and shutdown node."); + rclcpp::shutdown(); + } + // GPU memory allocation const auto input_dims = trt_common_->getBindingDimensions(0); const auto input_size = diff --git a/perception/lidar_centerpoint/package.xml b/perception/lidar_centerpoint/package.xml index fee97a84be917..674325cccc677 100644 --- a/perception/lidar_centerpoint/package.xml +++ b/perception/lidar_centerpoint/package.xml @@ -5,6 +5,7 @@ 1.0.0 The 
lidar_centerpoint package Kenzo Lobos-Tsunekawa + Koji Minoda Apache License 2.0 ament_cmake_auto diff --git a/perception/lidar_centerpoint/src/node.cpp b/perception/lidar_centerpoint/src/node.cpp index 39683755bcaf0..d370a60a26aa5 100644 --- a/perception/lidar_centerpoint/src/node.cpp +++ b/perception/lidar_centerpoint/src/node.cpp @@ -169,10 +169,17 @@ void LidarCenterPointNode::pointCloudCallback( if (debug_publisher_ptr_ && stop_watch_ptr_) { const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); + const double pipeline_latency_ms = + std::chrono::duration( + std::chrono::nanoseconds( + (this->get_clock()->now() - output_msg.header.stamp).nanoseconds())) + .count(); debug_publisher_ptr_->publish( "debug/cyclic_time_ms", cyclic_time_ms); debug_publisher_ptr_->publish( "debug/processing_time_ms", processing_time_ms); + debug_publisher_ptr_->publish( + "debug/pipeline_latency_ms", pipeline_latency_ms); } } diff --git a/perception/map_based_prediction/README.md b/perception/map_based_prediction/README.md index 2419cdb010799..a58e19db70304 100644 --- a/perception/map_based_prediction/README.md +++ b/perception/map_based_prediction/README.md @@ -191,6 +191,11 @@ This module treats **Pedestrians** and **Bicycles** as objects using the crosswa If there are a reachable crosswalk entry points within the `prediction_time_horizon` and the objects satisfies above condition, this module outputs additional predicted path to cross the opposite side via the crosswalk entry point. +This module takes into account the corresponding traffic light information. +When RED signal is indicated, we assume the target object will not walk across. +In addition, if the target object is stopping (not moving) against GREEN signal, we assume the target object will not walk across either. 
+This prediction comes from the assumption that the object should move if the traffic light is green and the object is intended to cross. +
@@ -205,10 +210,11 @@ If the target object is inside the road or crosswalk, this module outputs one or ### Input -| Name | Type | Description | -| -------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------- | -| `~/perception/object_recognition/tracking/objects` | `autoware_auto_perception_msgs::msg::TrackedObjects` | tracking objects without predicted path. | -| `~/vector_map` | `autoware_auto_mapping_msgs::msg::HADMapBin` | binary data of Lanelet2 Map. | +| Name | Type | Description | +| -------------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------------- | +| `~/perception/object_recognition/tracking/objects` | `autoware_auto_perception_msgs::msg::TrackedObjects` | tracking objects without predicted path. | +| `~/vector_map` | `autoware_auto_mapping_msgs::msg::HADMapBin` | binary data of Lanelet2 Map. | +| '~/perception/traffic_light_recognition/traffic_signals' | 'autoware_perception_msgs::msg::TrafficSignalArray;' | rearranged information on the corresponding traffic lights | ### Output diff --git a/perception/map_based_prediction/config/map_based_prediction.param.yaml b/perception/map_based_prediction/config/map_based_prediction.param.yaml index ed1193f1c49a0..f8ad371ab92a6 100644 --- a/perception/map_based_prediction/config/map_based_prediction.param.yaml +++ b/perception/map_based_prediction/config/map_based_prediction.param.yaml @@ -19,7 +19,12 @@ use_vehicle_acceleration: false # whether to consider current vehicle acceleration when predicting paths or not speed_limit_multiplier: 1.5 # When using vehicle acceleration. Set vehicle's maximum predicted speed as the legal speed limit in that lanelet times this value acceleration_exponential_half_life: 2.5 # [s] When using vehicle acceleration. 
The decaying acceleration model considers that the current vehicle acceleration will be halved after this many seconds - use_crosswalk_signal: true + crosswalk_with_signal: + use_crosswalk_signal: true + threshold_velocity_assumed_as_stopping: 0.25 # [m/s] velocity threshold for the module to judge whether the objects is stopped + # If the pedestrian does not move for X seconds against green signal, the module judge that the pedestrian has no intention to walk. + distance_set_for_no_intention_to_walk: [1.0, 5.0] # [m] ascending order, keys of map + timeout_set_for_no_intention_to_walk: [1.0, 0.0] # [s] values of map # parameter for shoulder lane prediction prediction_time_horizon_rate_for_validate_shoulder_lane_length: 0.8 diff --git a/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp b/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp index 2864d1c5bf393..321342c3d8367 100644 --- a/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp +++ b/perception/map_based_prediction/include/map_based_prediction/map_based_prediction_node.hpp @@ -42,6 +42,7 @@ #include #include +#include #include #include #include @@ -131,6 +132,7 @@ class MapBasedPredictionNode : public rclcpp::Node // Object History std::unordered_map> objects_history_; + std::map, rclcpp::Time> stopped_times_against_green_; // Lanelet Map Pointers std::shared_ptr lanelet_map_ptr_; @@ -190,6 +192,9 @@ class MapBasedPredictionNode : public rclcpp::Node double acceleration_exponential_half_life_; bool use_crosswalk_signal_; + double threshold_velocity_assumed_as_stopping_; + std::vector distance_set_for_no_intention_to_walk_; + std::vector timeout_set_for_no_intention_to_walk_; // Stop watch StopWatch stop_watch_; @@ -214,7 +219,8 @@ class MapBasedPredictionNode : public rclcpp::Node PredictedObject getPredictedObjectAsCrosswalkUser(const TrackedObject & object); - void 
removeOldObjectsHistory(const double current_time); + void removeOldObjectsHistory( + const double current_time, const TrackedObjects::ConstSharedPtr in_objects); LaneletsData getCurrentLanelets(const TrackedObject & object); bool checkCloseLaneletCondition( @@ -262,6 +268,9 @@ class MapBasedPredictionNode : public rclcpp::Node const PredictedPath & predicted_path, const std::vector & predicted_paths); std::optional getTrafficSignalId(const lanelet::ConstLanelet & way_lanelet); std::optional getTrafficSignalElement(const lanelet::Id & id); + bool calcIntentionToCrossWithTrafficSignal( + const TrackedObject & object, const lanelet::ConstLanelet & crosswalk, + const lanelet::Id & signal_id); visualization_msgs::msg::Marker getDebugMarker( const TrackedObject & object, const Maneuver & maneuver, const size_t obj_num); diff --git a/perception/map_based_prediction/schema/map_based_pediction.schema.json b/perception/map_based_prediction/schema/map_based_prediction.schema.json similarity index 100% rename from perception/map_based_prediction/schema/map_based_pediction.schema.json rename to perception/map_based_prediction/schema/map_based_prediction.schema.json diff --git a/perception/map_based_prediction/src/map_based_prediction_node.cpp b/perception/map_based_prediction/src/map_based_prediction_node.cpp index 168b874ccbcce..21697bb1b5f1a 100644 --- a/perception/map_based_prediction/src/map_based_prediction_node.cpp +++ b/perception/map_based_prediction/src/map_based_prediction_node.cpp @@ -788,7 +788,13 @@ MapBasedPredictionNode::MapBasedPredictionNode(const rclcpp::NodeOptions & node_ acceleration_exponential_half_life_ = declare_parameter("acceleration_exponential_half_life"); - use_crosswalk_signal_ = declare_parameter("use_crosswalk_signal"); + use_crosswalk_signal_ = declare_parameter("crosswalk_with_signal.use_crosswalk_signal"); + threshold_velocity_assumed_as_stopping_ = + declare_parameter("crosswalk_with_signal.threshold_velocity_assumed_as_stopping"); + 
distance_set_for_no_intention_to_walk_ = declare_parameter>( + "crosswalk_with_signal.distance_set_for_no_intention_to_walk"); + timeout_set_for_no_intention_to_walk_ = declare_parameter>( + "crosswalk_with_signal.timeout_set_for_no_intention_to_walk"); path_generator_ = std::make_shared( prediction_time_horizon_, lateral_control_time_horizon_, prediction_sampling_time_interval_, @@ -864,11 +870,11 @@ PredictedObject MapBasedPredictionNode::convertToPredictedObject( void MapBasedPredictionNode::mapCallback(const HADMapBin::ConstSharedPtr msg) { - RCLCPP_INFO(get_logger(), "[Map Based Prediction]: Start loading lanelet"); + RCLCPP_DEBUG(get_logger(), "[Map Based Prediction]: Start loading lanelet"); lanelet_map_ptr_ = std::make_shared(); lanelet::utils::conversion::fromBinMsg( *msg, lanelet_map_ptr_, &traffic_rules_ptr_, &routing_graph_ptr_); - RCLCPP_INFO(get_logger(), "[Map Based Prediction]: Map is loaded"); + RCLCPP_DEBUG(get_logger(), "[Map Based Prediction]: Map is loaded"); const auto all_lanelets = lanelet::utils::query::laneletLayer(lanelet_map_ptr_); const auto crosswalks = lanelet::utils::query::crosswalkLanelets(all_lanelets); @@ -912,7 +918,7 @@ void MapBasedPredictionNode::objectsCallback(const TrackedObjects::ConstSharedPt // Remove old objects information in object history const double objects_detected_time = rclcpp::Time(in_objects->header.stamp).seconds(); - removeOldObjectsHistory(objects_detected_time); + removeOldObjectsHistory(objects_detected_time, in_objects); // result output PredictedObjects output; @@ -1229,16 +1235,13 @@ PredictedObject MapBasedPredictionNode::getPredictedObjectAsCrosswalkUser( } } } + // try to find the edge points for all crosswalks and generate path to the crosswalk edge for (const auto & crosswalk : crosswalks_) { const auto crosswalk_signal_id_opt = getTrafficSignalId(crosswalk); if (crosswalk_signal_id_opt.has_value() && use_crosswalk_signal_) { - const auto signal_color = [&] { - const auto elem_opt = 
getTrafficSignalElement(crosswalk_signal_id_opt.value()); - return elem_opt ? elem_opt.value().color : TrafficSignalElement::UNKNOWN; - }(); - - if (signal_color == TrafficSignalElement::RED) { + if (!calcIntentionToCrossWithTrafficSignal( + object, crosswalk, crosswalk_signal_id_opt.value())) { continue; } } @@ -1306,6 +1309,13 @@ void MapBasedPredictionNode::updateObjectData(TrackedObject & object) // assumption: the object vx is much larger than vy if (object.kinematics.twist_with_covariance.twist.linear.x >= 0.0) return; + // calculate absolute velocity and do nothing if it is too slow + const double abs_object_speed = std::hypot( + object.kinematics.twist_with_covariance.twist.linear.x, + object.kinematics.twist_with_covariance.twist.linear.y); + constexpr double min_abs_speed = 1e-1; // 0.1 m/s + if (abs_object_speed < min_abs_speed) return; + switch (object.kinematics.orientation_availability) { case autoware_auto_perception_msgs::msg::DetectedObjectKinematics::SIGN_UNKNOWN: { const auto original_yaw = @@ -1330,7 +1340,8 @@ void MapBasedPredictionNode::updateObjectData(TrackedObject & object) return; } -void MapBasedPredictionNode::removeOldObjectsHistory(const double current_time) +void MapBasedPredictionNode::removeOldObjectsHistory( + const double current_time, const TrackedObjects::ConstSharedPtr in_objects) { std::vector invalid_object_id; for (auto iter = objects_history_.begin(); iter != objects_history_.end(); ++iter) { @@ -1371,6 +1382,19 @@ void MapBasedPredictionNode::removeOldObjectsHistory(const double current_time) for (const auto & key : invalid_object_id) { objects_history_.erase(key); } + + for (auto it = stopped_times_against_green_.begin(); it != stopped_times_against_green_.end();) { + const bool isDisappeared = std::none_of( + in_objects->objects.begin(), in_objects->objects.end(), + [&it](autoware_auto_perception_msgs::msg::TrackedObject obj) { + return tier4_autoware_utils::toHexString(obj.object_id) == it->first.first; + }); + if 
(isDisappeared) { + it = stopped_times_against_green_.erase(it); + } else { + ++it; + } + } } LaneletsData MapBasedPredictionNode::getCurrentLanelets(const TrackedObject & object) @@ -2268,6 +2292,69 @@ std::optional MapBasedPredictionNode::getTrafficSignalElem return std::nullopt; } +bool MapBasedPredictionNode::calcIntentionToCrossWithTrafficSignal( + const TrackedObject & object, const lanelet::ConstLanelet & crosswalk, + const lanelet::Id & signal_id) +{ + const auto signal_color = [&] { + const auto elem_opt = getTrafficSignalElement(signal_id); + return elem_opt ? elem_opt.value().color : TrafficSignalElement::UNKNOWN; + }(); + + const auto key = std::make_pair(tier4_autoware_utils::toHexString(object.object_id), signal_id); + if ( + signal_color == TrafficSignalElement::GREEN && + tier4_autoware_utils::calcNorm(object.kinematics.twist_with_covariance.twist.linear) < + threshold_velocity_assumed_as_stopping_) { + stopped_times_against_green_.try_emplace(key, this->get_clock()->now()); + + const auto timeout_no_intention_to_walk = [&]() { + auto InterpolateMap = []( + const std::vector & key_set, + const std::vector & value_set, const double query) { + if (query <= key_set.front()) { + return value_set.front(); + } else if (query >= key_set.back()) { + return value_set.back(); + } + for (size_t i = 0; i < key_set.size() - 1; ++i) { + if (key_set.at(i) <= query && query <= key_set.at(i + 1)) { + auto ratio = + (query - key_set.at(i)) / std::max(key_set.at(i + 1) - key_set.at(i), 1.0e-5); + ratio = std::clamp(ratio, 0.0, 1.0); + return value_set.at(i) + ratio * (value_set.at(i + 1) - value_set.at(i)); + } + } + return value_set.back(); + }; + + const auto obj_position = object.kinematics.pose_with_covariance.pose.position; + const double distance_to_crosswalk = boost::geometry::distance( + crosswalk.polygon2d().basicPolygon(), + lanelet::BasicPoint2d(obj_position.x, obj_position.y)); + return InterpolateMap( + distance_set_for_no_intention_to_walk_, 
timeout_set_for_no_intention_to_walk_, + distance_to_crosswalk); + }(); + + if ( + (this->get_clock()->now() - stopped_times_against_green_.at(key)).seconds() > + timeout_no_intention_to_walk) { + return false; + } + + } else { + stopped_times_against_green_.erase(key); + // If the pedestrian disappears, another function erases the old data. + } + + if (signal_color == TrafficSignalElement::RED) { + return false; + } + + return true; +} + } // namespace map_based_prediction #include diff --git a/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp index 8133bc7dcf0a0..8a3a5629ec277 100644 --- a/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp @@ -173,7 +173,7 @@ bool BicycleTracker::predict(const rclcpp::Time & time) bool BicycleTracker::predict(const double dt, KalmanFilter & ekf) const { - /* static bicycle model (constant slip angle, constant velocity) + /* Motion model: static bicycle model (constant slip angle, constant velocity) * * w_k = vel_k * sin(slip_k) / l_r * x_{k+1} = x_k + vel_k*cos(yaw_k+slip_k)*dt - 0.5*w_k*vel_k*sin(yaw_k+slip_k)*dt*dt @@ -291,12 +291,6 @@ bool BicycleTracker::predict(const double dt, KalmanFilter & ekf) const return true; } -/** - * @brief measurement update with ekf on receiving detected_object - * - * @param object : Detected object - * @return bool : measurement is updatable - */ bool BicycleTracker::measureWithPose( const autoware_auto_perception_msgs::msg::DetectedObject & object) { @@ -333,19 +327,16 @@ bool BicycleTracker::measureWithPose( const int dim_y = use_orientation_information ? 
3 : 2; // pos x, pos y, (pos yaw) depending on Pose output - /* Set measurement matrix */ + // Set measurement matrix C and observation vector Y Eigen::MatrixXd Y(dim_y, 1); + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); Y(IDX::X, 0) = object.kinematics.pose_with_covariance.pose.position.x; Y(IDX::Y, 0) = object.kinematics.pose_with_covariance.pose.position.y; - - /* Set measurement matrix */ - Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); C(0, IDX::X) = 1.0; // for pos x C(1, IDX::Y) = 1.0; // for pos y - /* Set measurement noise covariance */ + // Set noise covariance matrix R Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); - if (!object.kinematics.has_position_covariance) { R(0, 0) = ekf_params_.r_cov_x; // x - x R(0, 1) = 0.0; // x - y @@ -471,6 +462,7 @@ bool BicycleTracker::getTrackedObject( tmp_ekf_for_no_update.getX(X_t); tmp_ekf_for_no_update.getP(P); + /* put predicted pose and twist to output object */ auto & pose_with_cov = object.kinematics.pose_with_covariance; auto & twist_with_cov = object.kinematics.twist_with_covariance; diff --git a/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp index b5a730e93663a..80c0df7e5ffb1 100644 --- a/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp @@ -188,7 +188,7 @@ bool BigVehicleTracker::predict(const rclcpp::Time & time) bool BigVehicleTracker::predict(const double dt, KalmanFilter & ekf) const { - /* static bicycle model (constant slip angle, constant velocity) + /* Motion model: static bicycle model (constant slip angle, constant velocity) * * w_k = vel_k * sin(slip_k) / l_r * x_{k+1} = x_k + vel_k*cos(yaw_k+slip_k)*dt - 0.5*w_k*vel_k*sin(yaw_k+slip_k)*dt*dt @@ -362,11 +362,9 @@ bool BigVehicleTracker::measureWithPose( last_input_bounding_box_.width, 
last_input_bounding_box_.length, last_nearest_corner_index_, bbox_object, X_t(IDX::YAW), offset_object, tracking_offset_); - /* Set measurement matrix and noise covariance*/ + // Set measurement matrix C and observation vector Y Eigen::MatrixXd Y(dim_y, 1); Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); - Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); - Y(IDX::X, 0) = offset_object.kinematics.pose_with_covariance.pose.position.x; Y(IDX::Y, 0) = offset_object.kinematics.pose_with_covariance.pose.position.y; Y(IDX::YAW, 0) = measurement_yaw; @@ -374,7 +372,8 @@ bool BigVehicleTracker::measureWithPose( C(1, IDX::Y) = 1.0; // for pos y C(2, IDX::YAW) = 1.0; // for yaw - /* Set measurement noise covariance */ + // Set noise covariance matrix R + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); if (!object.kinematics.has_position_covariance) { const double cos_yaw = std::cos(measurement_yaw); const double sin_yaw = std::sin(measurement_yaw); @@ -459,6 +458,7 @@ bool BigVehicleTracker::measureWithShape( // update lf, lr lf_ = std::max(bounding_box_.length * 0.3, 1.5); // 30% front from the center, minimum of 1.5m lr_ = std::max(bounding_box_.length * 0.25, 1.5); // 25% rear from the center, minimum of 1.5m + return true; } @@ -597,6 +597,7 @@ bool BigVehicleTracker::getTrackedObject( const auto ekf_pose_yaw = tf2::getYaw(pose_with_cov.pose.orientation); object.shape.footprint = tier4_autoware_utils::rotatePolygon(object.shape.footprint, origin_yaw - ekf_pose_yaw); + return true; } diff --git a/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp index ca0a2d39c266b..7714c381894f0 100644 --- a/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp +++ b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp @@ -188,7 +188,7 @@ bool NormalVehicleTracker::predict(const rclcpp::Time & time) 
bool NormalVehicleTracker::predict(const double dt, KalmanFilter & ekf) const { - /* static bicycle model (constant slip angle, constant velocity) + /* Motion model: static bicycle model (constant slip angle, constant velocity) * * w_k = vel_k * sin(slip_k) / l_r * x_{k+1} = x_k + vel_k*cos(yaw_k+slip_k)*dt - 0.5*w_k*vel_k*sin(yaw_k+slip_k)*dt*dt @@ -362,11 +362,9 @@ bool NormalVehicleTracker::measureWithPose( last_input_bounding_box_.width, last_input_bounding_box_.length, last_nearest_corner_index_, bbox_object, X_t(IDX::YAW), offset_object, tracking_offset_); - /* Set measurement matrix and noise covariance*/ + // Set measurement matrix C and observation vector Y Eigen::MatrixXd Y(dim_y, 1); Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); - Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); - Y(IDX::X, 0) = offset_object.kinematics.pose_with_covariance.pose.position.x; Y(IDX::Y, 0) = offset_object.kinematics.pose_with_covariance.pose.position.y; Y(IDX::YAW, 0) = measurement_yaw; @@ -374,7 +372,8 @@ bool NormalVehicleTracker::measureWithPose( C(1, IDX::Y) = 1.0; // for pos y C(2, IDX::YAW) = 1.0; // for yaw - /* Set measurement noise covariance */ + // Set noise covariance matrix R + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); if (!object.kinematics.has_position_covariance) { const double cos_yaw = std::cos(measurement_yaw); const double sin_yaw = std::sin(measurement_yaw); @@ -598,6 +597,7 @@ bool NormalVehicleTracker::getTrackedObject( const auto ekf_pose_yaw = tf2::getYaw(pose_with_cov.pose.orientation); object.shape.footprint = tier4_autoware_utils::rotatePolygon(object.shape.footprint, origin_yaw - ekf_pose_yaw); + return true; } diff --git a/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp index b168717042db3..57c2e8f899951 100644 --- a/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp +++ 
b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp @@ -19,7 +19,6 @@ #include "multi_object_tracker/tracker/model/pedestrian_tracker.hpp" #include "multi_object_tracker/utils/utils.hpp" -#include "object_recognition_utils/object_recognition_utils.hpp" #include #include @@ -35,6 +34,7 @@ #else #include #endif +#include "object_recognition_utils/object_recognition_utils.hpp" #define EIGEN_MPL2_ONLY #include @@ -52,37 +52,41 @@ PedestrianTracker::PedestrianTracker( { object_ = object; - // initialize params - float q_stddev_x = 0.4; // [m/s] - float q_stddev_y = 0.4; // [m/s] - float q_stddev_yaw = tier4_autoware_utils::deg2rad(20); // [rad/s] - float q_stddev_vx = tier4_autoware_utils::kmph2mps(5); // [m/(s*s)] - float q_stddev_wz = tier4_autoware_utils::deg2rad(20); // [rad/(s*s)] - float r_stddev_x = 0.4; // [m] - float r_stddev_y = 0.4; // [m] - float r_stddev_yaw = tier4_autoware_utils::deg2rad(30); // [rad] - float p0_stddev_x = 1.0; // [m/s] - float p0_stddev_y = 1.0; // [m/s] - float p0_stddev_yaw = tier4_autoware_utils::deg2rad(1000); // [rad/s] - float p0_stddev_vx = tier4_autoware_utils::kmph2mps(5); // [m/(s*s)] - float p0_stddev_wz = tier4_autoware_utils::deg2rad(10); // [rad/(s*s)] - ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); - ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); - ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); - ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); - ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + // Initialize parameters + // measurement noise covariance + float r_stddev_x = 0.4; // [m] + float r_stddev_y = 0.4; // [m] + float r_stddev_yaw = tier4_autoware_utils::deg2rad(30); // [rad] ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + // initial state covariance + float p0_stddev_x = 2.0; // [m] + float p0_stddev_y = 2.0; // [m] + float p0_stddev_yaw = 
tier4_autoware_utils::deg2rad(1000); // [rad] + float p0_stddev_vx = tier4_autoware_utils::kmph2mps(120); // [m/s] + float p0_stddev_wz = tier4_autoware_utils::deg2rad(360); // [rad/s] ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + // process noise covariance + float q_stddev_x = 0.4; // [m/s] + float q_stddev_y = 0.4; // [m/s] + float q_stddev_yaw = tier4_autoware_utils::deg2rad(20); // [rad/s] + float q_stddev_vx = tier4_autoware_utils::kmph2mps(5); // [m/(s*s)] + float q_stddev_wz = tier4_autoware_utils::deg2rad(30); // [rad/(s*s)] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + // limitations max_vx_ = tier4_autoware_utils::kmph2mps(10); // [m/s] max_wz_ = tier4_autoware_utils::deg2rad(30); // [rad/s] - // initialize X matrix + // initialize state vector X Eigen::MatrixXd X(ekf_params_.dim_x, 1); X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; @@ -95,7 +99,7 @@ PedestrianTracker::PedestrianTracker( X(IDX::WZ) = 0.0; } - // initialize P matrix + // initialize state covariance matrix P Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); if (!object.kinematics.has_position_covariance) { const double cos_yaw = std::cos(X(IDX::YAW)); @@ -154,7 +158,7 @@ bool PedestrianTracker::predict(const rclcpp::Time & time) bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const { - /* == Nonlinear model == + /* Motion model: Constant turn-rate motion model (CTRV) * * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt * y_{k+1} = 
y_k + vx_k * sin(yaw_k) * dt @@ -164,7 +168,7 @@ bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const * */ - /* == Linearized model == + /* Jacobian Matrix * * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] @@ -173,14 +177,14 @@ bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const * [ 0, 0, 0, 0, 1] */ - // X t + // Current state vector X t Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state ekf.getX(X_t); const double cos_yaw = std::cos(X_t(IDX::YAW)); const double sin_yaw = std::sin(X_t(IDX::YAW)); const double sin_2yaw = std::sin(2.0f * X_t(IDX::YAW)); - // X t+1 + // Predict state vector X t+1 Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * cos_yaw * dt; // dx = v * cos(yaw) X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VX) * sin_yaw * dt; // dy = v * sin(yaw) @@ -188,7 +192,7 @@ bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const X_next_t(IDX::VX) = X_t(IDX::VX); X_next_t(IDX::WZ) = X_t(IDX::WZ); - // A + // State transition matrix A Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); A(IDX::X, IDX::YAW) = -X_t(IDX::VX) * sin_yaw * dt; A(IDX::X, IDX::VX) = cos_yaw * dt; @@ -196,7 +200,7 @@ bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const A(IDX::Y, IDX::VX) = sin_yaw * dt; A(IDX::YAW, IDX::WZ) = dt; - // Q + // Process noise covariance Q Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); // Rotate the covariance matrix according to the vehicle yaw // because q_cov_x and y are in the vehicle coordinate system. 
@@ -240,18 +244,16 @@ bool PedestrianTracker::measureWithPose( // if (tier4_autoware_utils::deg2rad(60) < std::fabs(theta)) return false; // } - /* Set measurement matrix */ + // Set measurement matrix C and observation vector Y Eigen::MatrixXd Y(dim_y, 1); + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); Y << object.kinematics.pose_with_covariance.pose.position.x, object.kinematics.pose_with_covariance.pose.position.y; - - /* Set measurement matrix */ - Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); C(0, IDX::X) = 1.0; // for pos x C(1, IDX::Y) = 1.0; // for pos y // C(2, IDX::YAW) = 1.0; // for yaw - /* Set measurement noise covariance */ + // Set noise covariance matrix R Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); if (!object.kinematics.has_position_covariance) { R(0, 0) = ekf_params_.r_cov_x; // x - x @@ -270,6 +272,8 @@ bool PedestrianTracker::measureWithPose( // R(2, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_Y]; // R(2, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; } + + // ekf update if (!ekf_.update(Y, C, R)) { RCLCPP_WARN(logger_, "Cannot update"); } @@ -345,7 +349,7 @@ bool PedestrianTracker::getTrackedObject( object.object_id = getUUID(); object.classification = getClassification(); - // predict kinematics + // predict state KalmanFilter tmp_ekf_for_no_update = ekf_; const double dt = (time - last_update_time_).seconds(); if (0.001 /*1msec*/ < dt) { @@ -356,6 +360,7 @@ bool PedestrianTracker::getTrackedObject( tmp_ekf_for_no_update.getX(X_t); tmp_ekf_for_no_update.getP(P); + /* put predicted pose and twist to output object */ auto & pose_with_cov = object.kinematics.pose_with_covariance; auto & twist_with_cov = object.kinematics.twist_with_covariance; @@ -399,6 +404,7 @@ bool PedestrianTracker::getTrackedObject( constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative constexpr double wx_cov = 0.1 * 
0.1; // TODO(yukkysaito) Currently tentative constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + twist_with_cov.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); twist_with_cov.covariance[utils::MSG_COV_IDX::Y_Y] = vy_cov; twist_with_cov.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; diff --git a/perception/object_merger/config/object_association_merger.param.yaml b/perception/object_merger/config/object_association_merger.param.yaml new file mode 100644 index 0000000000000..5e5dafad5d573 --- /dev/null +++ b/perception/object_merger/config/object_association_merger.param.yaml @@ -0,0 +1,5 @@ +/**: + ros__parameters: + sync_queue_size: 20 + precision_threshold_to_judge_overlapped: 0.4 + remove_overlapped_unknown_objects: true diff --git a/perception/object_merger/launch/object_association_merger.launch.xml b/perception/object_merger/launch/object_association_merger.launch.xml index 5754c256fef3c..b26788bb04667 100644 --- a/perception/object_merger/launch/object_association_merger.launch.xml +++ b/perception/object_merger/launch/object_association_merger.launch.xml @@ -6,7 +6,7 @@ - + @@ -14,9 +14,9 @@ + + + - - - diff --git a/perception/object_range_splitter/src/node.cpp b/perception/object_range_splitter/src/node.cpp index 52383ef922621..4c4f824feb6fb 100644 --- a/perception/object_range_splitter/src/node.cpp +++ b/perception/object_range_splitter/src/node.cpp @@ -20,7 +20,7 @@ ObjectRangeSplitterNode::ObjectRangeSplitterNode(const rclcpp::NodeOptions & nod : Node("object_range_splitter_node", node_options) { using std::placeholders::_1; - spilt_range_ = declare_parameter("split_range", 30.0); + spilt_range_ = declare_parameter("split_range"); sub_ = this->create_subscription( "input/object", rclcpp::QoS{1}, std::bind(&ObjectRangeSplitterNode::objectCallback, this, _1)); long_range_object_pub_ = diff --git a/perception/object_velocity_splitter/CMakeLists.txt b/perception/object_velocity_splitter/CMakeLists.txt index 
0bf6de39f8f3c..68e87a3355f86 100644 --- a/perception/object_velocity_splitter/CMakeLists.txt +++ b/perception/object_velocity_splitter/CMakeLists.txt @@ -1,17 +1,6 @@ cmake_minimum_required(VERSION 3.5) project(object_velocity_splitter) -# Compile options -if(NOT CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 17) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") - add_compile_options(-Wall -Wextra -Wpedantic) -endif() - # Dependencies find_package(autoware_cmake REQUIRED) autoware_package() diff --git a/perception/object_velocity_splitter/README.md b/perception/object_velocity_splitter/README.md index 9dcd535a5a10f..57d2834f3f442 100644 --- a/perception/object_velocity_splitter/README.md +++ b/perception/object_velocity_splitter/README.md @@ -3,21 +3,23 @@ This package contains a object filter module for [autoware_auto_perception_msgs/msg/DetectedObject](https://gitlab.com/autowarefoundation/autoware.auto/autoware_auto_msgs/-/blob/master/autoware_auto_perception_msgs/msg/DetectedObject.idl). This package can split DetectedObjects into two messages by object's speed. -## Input +## Interface -| Name | Type | Description | -| ----------------- | ----------------------------------------------------- | -------------------- | -| `~/input/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | 3D detected objects. 
| +### Input -## Output +- `~/input/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - 3D detected objects -| Name | Type | Description | -| ----------------------------- | ----------------------------------------------------- | ----------------------- | -| `~/output/low_speed_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Objects with low speed | -| `~/output/high_speed_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Objects with high speed | +### Output -## Parameters +- `~/output/low_speed_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Objects with low speed +- `~/output/high_speed_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Objects with high speed -| Name | Type | Description | Default value | -| :------------------- | :----- | :-------------------------------------------------- | :------------ | -| `velocity_threshold` | double | Velocity threshold parameter to split objects [m/s] | 3.0 | +### Parameters + +- `velocity_threshold` (double) [m/s] + - Default parameter is 3.0 + +This parameter is velocity threshold to split objects diff --git a/perception/object_velocity_splitter/src/object_velocity_splitter_node/object_velocity_splitter_node.cpp b/perception/object_velocity_splitter/src/object_velocity_splitter_node/object_velocity_splitter_node.cpp index a5d54b63b3311..0dfe6c6034948 100644 --- a/perception/object_velocity_splitter/src/object_velocity_splitter_node/object_velocity_splitter_node.cpp +++ b/perception/object_velocity_splitter/src/object_velocity_splitter_node/object_velocity_splitter_node.cpp @@ -53,7 +53,7 @@ ObjectVelocitySplitterNode::ObjectVelocitySplitterNode(const rclcpp::NodeOptions std::bind(&ObjectVelocitySplitterNode::onSetParam, this, std::placeholders::_1)); // Node Parameter - node_param_.velocity_threshold = declare_parameter("velocity_threshold", 3.0); + node_param_.velocity_threshold = 
declare_parameter("velocity_threshold"); // Subscriber sub_objects_ = create_subscription( diff --git a/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/pointcloud_based_occupancy_grid_map_node.cpp b/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/pointcloud_based_occupancy_grid_map_node.cpp index 82da92da7f146..63b6715948f93 100644 --- a/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/pointcloud_based_occupancy_grid_map_node.cpp +++ b/perception/probabilistic_occupancy_grid_map/src/pointcloud_based_occupancy_grid_map/pointcloud_based_occupancy_grid_map_node.cpp @@ -206,10 +206,17 @@ void PointcloudBasedOccupancyGridMapNode::onPointcloudWithObstacleAndRaw( if (debug_publisher_ptr_ && stop_watch_ptr_) { const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); + const double pipeline_latency_ms = + std::chrono::duration( + std::chrono::nanoseconds( + (this->get_clock()->now() - input_raw_msg->header.stamp).nanoseconds())) + .count(); debug_publisher_ptr_->publish( "debug/cyclic_time_ms", cyclic_time_ms); debug_publisher_ptr_->publish( "debug/processing_time_ms", processing_time_ms); + debug_publisher_ptr_->publish( + "debug/pipeline_latency_ms", pipeline_latency_ms); } } diff --git a/perception/probabilistic_occupancy_grid_map/test/test_synchronized_grid_map_fusion_node.py b/perception/probabilistic_occupancy_grid_map/test/test_synchronized_grid_map_fusion_node.py index c15daaf886b6f..e73aea581433e 100644 --- a/perception/probabilistic_occupancy_grid_map/test/test_synchronized_grid_map_fusion_node.py +++ b/perception/probabilistic_occupancy_grid_map/test/test_synchronized_grid_map_fusion_node.py @@ -57,7 +57,7 @@ def generate_test_description(): FindPackageShare(PACKAGE_NAME).find(PACKAGE_NAME) + "/launch/synchronized_occupancy_grid_map_fusion.launch.xml" ) - # use 
default launch arguments and parms + # use default launch arguments and params launch_args = [] # action to include launch file test_launch_file = launch.actions.IncludeLaunchDescription( diff --git a/perception/radar_crossing_objects_noise_filter/CMakeLists.txt b/perception/radar_crossing_objects_noise_filter/CMakeLists.txt index 6f1270c656bde..dd179e99829a3 100644 --- a/perception/radar_crossing_objects_noise_filter/CMakeLists.txt +++ b/perception/radar_crossing_objects_noise_filter/CMakeLists.txt @@ -1,17 +1,6 @@ cmake_minimum_required(VERSION 3.5) project(radar_crossing_objects_noise_filter) -# Compile options -if(NOT CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 17) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") - add_compile_options(-Wall -Wextra -Wpedantic) -endif() - # Dependencies find_package(autoware_cmake REQUIRED) autoware_package() diff --git a/perception/radar_crossing_objects_noise_filter/README.md b/perception/radar_crossing_objects_noise_filter/README.md index dc1283798fed9..d6d6005a5178e 100644 --- a/perception/radar_crossing_objects_noise_filter/README.md +++ b/perception/radar_crossing_objects_noise_filter/README.md @@ -3,7 +3,7 @@ This package contains a radar noise filter module for [autoware_auto_perception_msgs/msg/DetectedObject](https://gitlab.com/autowarefoundation/autoware.auto/autoware_auto_msgs/-/blob/master/autoware_auto_perception_msgs/msg/DetectedObject.idl). This package can filter the noise objects which cross to the ego vehicle. -## Algorithm +## Design ### Background @@ -32,7 +32,7 @@ When the ego vehicle turn right, the surrounding objects have left circular moti ![turning_around](docs/turning_around.png) -### Detail Algorithm +### Algorithm To filter the objects crossing to ego vehicle, this package filter the objects as below algorithm. 
@@ -51,22 +51,30 @@ To filter the objects crossing to ego vehicle, this package filter the objects a } ``` -## Input +## Interface -| Name | Type | Description | -| ----------------- | ----------------------------------------------------- | -------------- | -| `~/input/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Radar objects. | +### Input -## Output +- `~/input/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Input radar objects -| Name | Type | Description | -| --------------------------- | ----------------------------------------------------- | ---------------- | -| `~/output/noise_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Noise objects | -| `~/output/filtered_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Filtered objects | +### Output -## Parameters +- `~/output/noise_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Noise objects +- `~/output/filtered_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Filtered objects -| Name | Type | Description | Default value | -| :------------------- | :----- | :-------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| `angle_threshold` | double | The angle threshold parameter to filter [rad]. This parameter has condition that 0 < `angle_threshold` < pi / 2. See algorithm chapter for details. | 1.0472 | -| `velocity_threshold` | double | The velocity threshold parameter to filter [m/s]. See algorithm chapter for details. | 3.0 | +### Parameters + +- `angle_threshold` (double) [rad] + - Default parameter is 1.0472. + +This parameter is the angle threshold to filter. It has condition that 0 < `angle_threshold` < pi / 2. If the crossing angle is larger than this parameter, it can be a candidate for noise object. 
In other words, if it is smaller than this parameter, it is a filtered object. +If this parameter is set smaller, more objects are considered noise. In detail, see algorithm chapter. + +- `velocity_threshold` (double) [m/s] + - Default parameter is 3.0. + +This parameter is the velocity threshold to filter. If velocity of an object is larger than this parameter, it can be a candidate for noise object. In other words, if velocity of an object is smaller than this parameter, it is a filtered object. +If this parameter is set smaller, more objects are considered noise. In detail, see algorithm chapter. diff --git a/perception/radar_crossing_objects_noise_filter/src/radar_crossing_objects_noise_filter_node/radar_crossing_objects_noise_filter_node.cpp b/perception/radar_crossing_objects_noise_filter/src/radar_crossing_objects_noise_filter_node/radar_crossing_objects_noise_filter_node.cpp index 947856e5d2a4d..5b69ba65f3964 100644 --- a/perception/radar_crossing_objects_noise_filter/src/radar_crossing_objects_noise_filter_node/radar_crossing_objects_noise_filter_node.cpp +++ b/perception/radar_crossing_objects_noise_filter/src/radar_crossing_objects_noise_filter_node/radar_crossing_objects_noise_filter_node.cpp @@ -64,8 +64,8 @@ RadarCrossingObjectsNoiseFilterNode::RadarCrossingObjectsNoiseFilterNode( std::bind(&RadarCrossingObjectsNoiseFilterNode::onSetParam, this, std::placeholders::_1)); // Node Parameter - node_param_.angle_threshold = declare_parameter("angle_threshold", 1.0472); - node_param_.velocity_threshold = declare_parameter("velocity_threshold", 3.0); + node_param_.angle_threshold = declare_parameter("angle_threshold"); + node_param_.velocity_threshold = declare_parameter("velocity_threshold"); // Subscriber sub_objects_ = create_subscription( diff --git a/perception/radar_fusion_to_detected_object/README.md b/perception/radar_fusion_to_detected_object/README.md index defc716a744d9..9ea8e5d188f71 100644 --- 
a/perception/radar_fusion_to_detected_object/README.md +++ b/perception/radar_fusion_to_detected_object/README.md @@ -1,46 +1,100 @@ # radar_fusion_to_detected_object -This package contains a sensor fusion module for radar-detected objects and 3D detected objects. The fusion node can: +This package contains a sensor fusion module for radar-detected objects and 3D detected objects. + +The fusion node can: - Attach velocity to 3D detections when successfully matching radar data. The tracking modules use the velocity information to enhance the tracking results while planning modules use it to execute actions like adaptive cruise control. - Improve the low confidence 3D detections when corresponding radar detections are found. ![process_low_confidence](docs/radar_fusion_to_detected_object_6.drawio.svg) -## Core algorithm +## Design + +### Background + +This package is the fusion with LiDAR-based 3D detection output and radar data. +LiDAR based 3D detection can estimate position and size of objects with high precision, but it cannot estimate velocity of objects. +Radar data can estimate doppler velocity of objects, but it cannot estimate position and size of objects with high precision +This fusion package is aim to fuse these characteristic data, and to estimate position, size, velocity of objects with high precision. + +### Algorithm The document of core algorithm is [here](docs/algorithm.md) +## Interface for core algorithm + +The parameters for core algorithm can be set as `core_params`. + ### Parameters for sensor fusion -| Name | Type | Description | Default value | -| :----------------------- | :----- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| bounding_box_margin | double | The distance to extend the 2D bird's-eye view Bounding Box on each side. 
This distance is used as a threshold to find radar centroids falling inside the extended box. [m] | 2.0 | -| split_threshold_velocity | double | The object's velocity threshold to decide to split for two objects from radar information (currently not implemented) [m/s] | 5.0 | -| threshold_yaw_diff | double | The yaw orientation threshold. If ∣ θ_ob − θ_ra ∣ < threshold × yaw_diff attached to radar information include estimated velocity, where*θob*is yaw angle from 3d detected object,\*θ_ra is yaw angle from radar object. [rad] | 0.35 | +- `bounding_box_margin` (double) [m] + - Default parameter is 2.0. + +This parameter is the distance to extend the 2D bird's-eye view bounding box on each side. +This parameter is used as a threshold to find radar centroids falling inside the extended box. + +- `split_threshold_velocity` (double) [m/s] + - Default parameter is 5.0. + +This parameter is the object's velocity threshold to decide to split for two objects from radar information. +Note that this feature is not currently implemented. + +- `threshold_yaw_diff` (double) [rad] + - Default parameter is 0.35. + +This parameter is the yaw orientation threshold. +If the difference of yaw degree between from a LiDAR-based detection object and radar velocity, radar information is attached to output objects. ### Weight parameters for velocity estimation To tune these weight parameters, please see [document](docs/algorithm.md) in detail. -| Name | Type | Description | Default value | -| :----------------------------------- | :----- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| velocity_weight_average | double | The twist coefficient of average twist of radar data in velocity estimation. | 0.0 | -| velocity_weight_median | double | The twist coefficient of median twist of radar data in velocity estimation. 
| 0.0 | -| velocity_weight_min_distance | double | The twist coefficient of radar data nearest to the center of bounding box in velocity estimation. | 1.0 | -| velocity_weight_target_value_average | double | The twist coefficient of target value weighted average in velocity estimation. Target value is amplitude if using radar pointcloud. Target value is probability if using radar objects. | -| 0.0 | -| velocity_weight_target_value_top | double | The twist coefficient of top target value radar data in velocity estimation. Target value is amplitude if using radar pointcloud. Target value is probability if using radar objects. | 0.0 | +- `velocity_weight_average` (double) +- Default parameter is 0.0. + +This parameter is the twist coefficient of average twist of radar data in velocity estimation. + +- `velocity_weight_median` (double) +- Default parameter is 0.0. + +This parameter is the twist coefficient of median twist of radar data in velocity estimation. + +- `velocity_weight_min_distance` (double) +- Default parameter is 1.0. + +This parameter is the twist coefficient of radar data nearest to the center of bounding box in velocity estimation. + +- `velocity_weight_target_value_average` (double) +- Default parameter is 0.0. + +This parameter is the twist coefficient of target value weighted average in velocity estimation. Target value is amplitude if using radar pointcloud. Target value is probability if using radar objects. + +- `velocity_weight_target_value_top` (double) +- Default parameter is 0.0. + +This parameter is the twist coefficient of top target value radar data in velocity estimation. Target value is amplitude if using radar pointcloud. Target value is probability if using radar objects. 
### Parameters for fixed object information -| Name | Type | Description | Default value | -| :----------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| convert_doppler_to_twist | bool | Convert doppler velocity to twist using the yaw information of a detected object. | false | -| threshold_probability | float | If the probability of an output object is lower than this parameter, and the output object does not have radar points/objects, then delete the object. | 0.4 | -| compensate_probability | bool | If this parameter is true, compensate probability of objects to threshold probability. | false | +- `convert_doppler_to_twist` (bool) + - Default parameter is false. + +This parameter is the flag whether convert doppler velocity to twist using the yaw information of a detected object. -## radar_object_fusion_to_detected_object +- `threshold_probability` (float) + - Default parameter is 0.4. + +This parameter is the threshold to filter output objects. +If the probability of an output object is lower than this parameter, and the output object does not have radar points/objects, then delete the object. + +- `compensate_probability` (bool) + - Default parameter is false. + +This parameter is the flag to use probability compensation. +If this parameter is true, compensate probability of objects to threshold probability. + +## Interface for radar_object_fusion_to_detected_object Sensor fusion with radar objects and a detected object. @@ -56,24 +110,28 @@ ros2 launch radar_fusion_to_detected_object radar_object_to_detected_object.laun ### Input -| Name | Type | Description | -| ----------------------- | ----------------------------------------------------- | ---------------------------------------------------------------------- | -| `~/input/objects` | autoware_auto_perception_msgs/msg/DetectedObject.msg | 3D detected objects. 
| -| `~/input/radar_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Radar objects. Note that frame_id need to be same as `~/input/objects` | +- `~/input/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - 3D detected objects. +- `~/input/radar_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Radar objects. Note that frame_id need to be same as `~/input/objects` ### Output -| Name | Type | Description | -| -------------------------------- | ----------------------------------------------------- | -------------------------------------------------------------------------------------- | -| `~/output/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | 3D detected object with twist. | -| `~/debug/low_confidence_objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | 3D detected object that doesn't output as `~/output/objects` because of low confidence | +- `~/output/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - 3D detected object with twist. +- `~/debug/low_confidence_objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - 3D detected object that doesn't output as `~/output/objects` because of low confidence ### Parameters -| Name | Type | Description | Default value | -| :------------- | :----- | :-------------------- | :------------ | -| update_rate_hz | double | The update rate [hz]. | 20.0 | +The parameters for core algorithm can be set as `node_params`. + +- `update_rate_hz` (double) [hz] + - Default parameter is 20.0 + +This parameter is update rate for the `onTimer` function. +This parameter should be same as the frame rate of input topics. 
-## radar_scan_fusion_to_detected_object (TBD) +## Interface for radar_scan_fusion_to_detected_object (TBD) -TBD +Under implementation diff --git a/perception/radar_object_clustering/CMakeLists.txt b/perception/radar_object_clustering/CMakeLists.txt index ba911c3035765..ea25353d3e1d7 100644 --- a/perception/radar_object_clustering/CMakeLists.txt +++ b/perception/radar_object_clustering/CMakeLists.txt @@ -1,17 +1,6 @@ cmake_minimum_required(VERSION 3.5) project(radar_object_clustering) -# Compile options -if(NOT CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 17) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") - add_compile_options(-Wall -Wextra -Wpedantic) -endif() - # Dependencies find_package(autoware_cmake REQUIRED) autoware_package() diff --git a/perception/radar_object_clustering/README.md b/perception/radar_object_clustering/README.md index 6463dd7157dea..0b2dd07f9e59f 100644 --- a/perception/radar_object_clustering/README.md +++ b/perception/radar_object_clustering/README.md @@ -7,7 +7,7 @@ In other word, this package can combine multiple radar detections from one objec ![radar_clustering](docs/radar_clustering.drawio.svg) -## Algorithm +## Design ### Background @@ -15,14 +15,14 @@ In radars with object output, there are cases that multiple detection results ar Its multiple detection results cause separation of objects in tracking module. Therefore, by this package the multiple detection results are clustered into one object in advance. -### Detail Algorithm +### Algorithm -- Sort by distance from `base_link` +- 1. Sort by distance from `base_link` At first, to prevent changing the result from depending on the order of objects in DetectedObjects, input objects are sorted by distance from `base_link`. In addition, to apply matching in closeness order considering occlusion, objects are sorted in order of short distance in advance. -- Clustering +- 2. 
Clustering If two radar objects are near, and yaw angle direction and velocity between two radar objects is similar (the degree of these is defined by parameters), then these are clustered. Note that radar characteristic affect parameters for this matching. @@ -32,13 +32,13 @@ For example, if resolution of range distance or angle is low and accuracy of vel After grouping for all radar objects, if multiple radar objects are grouping, the kinematics of the new clustered object is calculated from average of that and label and shape of the new clustered object is calculated from top confidence in radar objects. -- Fixed label correction +- 3. Fixed label correction When the label information from radar outputs lack accuracy, `is_fixed_label` parameter is recommended to set `true`. If the parameter is true, the label of a clustered object is overwritten by the label set by `fixed_label` parameter. If this package use for faraway dynamic object detection with radar, the parameter is recommended to set to `VEHICLE`. -- Fixed size correction +- 4. Fixed size correction When the size information from radar outputs lack accuracy, `is_fixed_size` parameter is recommended to set `true`. If the parameter is true, the size of a clustered object is overwritten by the label set by `size_x`, `size_y`, and `size_z` parameters. @@ -51,28 +51,75 @@ Note that to use for [multi_objects_tracker](https://github.com/autowarefoundati For now, size estimation for clustered object is not implemented. So `is_fixed_size` parameter is recommended to set `true`, and size parameters is recommended to set to value near to average size of vehicles. -## Input - -| Name | Type | Description | -| ----------------- | ----------------------------------------------------- | -------------- | -| `~/input/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Radar objects. 
| - -## Output - -| Name | Type | Description | -| ------------------ | ----------------------------------------------------- | -------------- | -| `~/output/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Output objects | - -## Parameters - -| Name | Type | Description | Default value | -| :------------------- | :----- | :---------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| `angle_threshold` | double | Angle threshold to judge whether radar detections come from one object. [rad] | 0.174 | -| `distance_threshold` | double | Distance threshold to judge whether radar detections come from one object. [m] | 4.0 | -| `velocity_threshold` | double | Velocity threshold to judge whether radar detections come from one object. [m/s] | 2.0 | -| `is_fixed_label` | bool | If this parameter is true, the label of a clustered object is overwritten by the label set by `fixed_label` parameter. | false | -| `fixed_label` | string | If `is_fixed_label` is true, the label of a clustered object is overwritten by this parameter. | "UNKNOWN" | -| `is_fixed_size` | bool | If this parameter is true, the size of a clustered object is overwritten by the label set by `size_x`, `size_y`, and `size_z` parameters. | false | -| `size_x` | double | If `is_fixed_size` is true, the x-axis size of a clustered object is overwritten by this parameter. [m] | 4.0 | -| `size_y` | double | If `is_fixed_size` is true, the y-axis size of a clustered object is overwritten by this parameter. [m] | 1.5 | -| `size_z` | double | If `is_fixed_size` is true, the z-axis size of a clustered object is overwritten by this parameter. 
[m] | 1.5 | +## Interface + +### Input + +- `~/input/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Radar objects + +### Output + +- `~/output/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`) + - Output objects + +### Parameter + +- `angle_threshold` (double) [rad] + - Default parameter is 0.174. +- `distance_threshold` (double) [m] + - Default parameter is 4.0. +- `velocity_threshold` (double) [m/s] + - Default parameter is 2.0. + +These parameters are thresholds for angle, distance, and velocity to judge whether radar detections come from one object in "clustering" processing, which is described in detail in the algorithm section. +If all of the differences in angle/distance/velocity between two objects are less than the thresholds, then the two objects are merged into one clustered object. +If these parameters are larger, more objects are merged into one clustered object. + +These are used in the `isSameObject` function as below. + +```cpp + +bool RadarObjectClusteringNode::isSameObject( + const DetectedObject & object_1, const DetectedObject & object_2) +{ + const double angle_diff = std::abs(tier4_autoware_utils::normalizeRadian( + tf2::getYaw(object_1.kinematics.pose_with_covariance.pose.orientation) - + tf2::getYaw(object_2.kinematics.pose_with_covariance.pose.orientation))); + const double velocity_diff = std::abs( + object_1.kinematics.twist_with_covariance.twist.linear.x - + object_2.kinematics.twist_with_covariance.twist.linear.x); + const double distance = tier4_autoware_utils::calcDistance2d( + object_1.kinematics.pose_with_covariance.pose.position, + object_2.kinematics.pose_with_covariance.pose.position); + + if ( + distance < node_param_.distance_threshold && angle_diff < node_param_.angle_threshold && + velocity_diff < node_param_.velocity_threshold) { + return true; + } else { + return false; + } +} +``` + +- `is_fixed_label` (bool) + - Default parameter is false. +- `fixed_label` (string) + - Default parameter is "UNKNOWN". 
+ +`is_fixed_label` is the flag to use a fixed label. +If it is true, the label of a clustered object is overwritten by the label set by the `fixed_label` parameter. +If the radar objects do not have label information, then it is recommended to use a fixed label. + +- `is_fixed_size` (bool) + - Default parameter is false. +- `size_x` (double) [m] + - Default parameter is 4.0. +- `size_y` (double) [m] + - Default parameter is 1.5. +- `size_z` (double) [m] + - Default parameter is 1.5. + +`is_fixed_size` is the flag to use fixed size parameters. +If it is true, the size of a clustered object is overwritten by the size set by the `size_x`, `size_y`, and `size_z` parameters. diff --git a/perception/radar_object_tracker/config/radar_object_tracker.param.yaml b/perception/radar_object_tracker/config/radar_object_tracker.param.yaml index d2c0841cf372e..2fe006ffe8394 100644 --- a/perception/radar_object_tracker/config/radar_object_tracker.param.yaml +++ b/perception/radar_object_tracker/config/radar_object_tracker.param.yaml @@ -24,3 +24,6 @@ max_distance_from_lane: 5.0 # [m] max_angle_diff_from_lane: 0.785398 # [rad] (45 deg) max_lateral_velocity: 7.0 # [m/s] + + # tracking model parameters + tracking_config_directory: $(find-pkg-share radar_object_tracker)/config/tracking/ diff --git a/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml b/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml index 60ed2efb5767b..dfebf334456ea 100644 --- a/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml +++ b/perception/radar_object_tracker/launch/radar_object_tracker.launch.xml @@ -3,7 +3,6 @@ - @@ -14,7 +13,6 @@ - - + diff --git a/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp b/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp index 5815fe34a68a9..62d3ff532da4d 100644 --- a/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp 
+++ b/perception/radar_object_tracker/src/tracker/model/constant_turn_rate_motion_tracker.cpp @@ -387,11 +387,11 @@ bool ConstantTurnRateMotionTracker::measureWithPose( const bool enable_yaw_measurement = trust_yaw_input_ && object_has_orientation; if (enable_yaw_measurement) { - Eigen::MatrixXd Cyaw = Eigen::MatrixXd::Zero(1, ekf_params_.dim_x); - Cyaw(0, IDX::YAW) = 1; - C_list.push_back(Cyaw); + Eigen::MatrixXd C_yaw = Eigen::MatrixXd::Zero(1, ekf_params_.dim_x); + C_yaw(0, IDX::YAW) = 1; + C_list.push_back(C_yaw); - Eigen::MatrixXd Yyaw = Eigen::MatrixXd::Zero(1, 1); + Eigen::MatrixXd Y_yaw = Eigen::MatrixXd::Zero(1, 1); const auto yaw = [&] { auto obj_yaw = tier4_autoware_utils::normalizeRadian( tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation)); @@ -404,12 +404,12 @@ bool ConstantTurnRateMotionTracker::measureWithPose( return obj_yaw; }(); - Yyaw << yaw; - Y_list.push_back(Yyaw); + Y_yaw << yaw; + Y_list.push_back(Y_yaw); - Eigen::MatrixXd Ryaw = Eigen::MatrixXd::Zero(1, 1); - Ryaw << ekf_params_.r_cov_yaw; - R_block_list.push_back(Ryaw); + Eigen::MatrixXd R_yaw = Eigen::MatrixXd::Zero(1, 1); + R_yaw << ekf_params_.r_cov_yaw; + R_block_list.push_back(R_yaw); } // 3. 
add linear velocity measurement diff --git a/perception/radar_tracks_msgs_converter/config/radar_tracks_msgs_converter.param.yaml b/perception/radar_tracks_msgs_converter/config/radar_tracks_msgs_converter.param.yaml index 5a30a6bb4031d..7b344aaa57762 100644 --- a/perception/radar_tracks_msgs_converter/config/radar_tracks_msgs_converter.param.yaml +++ b/perception/radar_tracks_msgs_converter/config/radar_tracks_msgs_converter.param.yaml @@ -1,6 +1,7 @@ /**: ros__parameters: update_rate_hz: 20.0 + new_frame_id: "base_link" use_twist_compensation: true use_twist_yaw_compensation: false static_object_speed_threshold: 1.0 diff --git a/perception/radar_tracks_msgs_converter/src/radar_tracks_msgs_converter_node/radar_tracks_msgs_converter_node.cpp b/perception/radar_tracks_msgs_converter/src/radar_tracks_msgs_converter_node/radar_tracks_msgs_converter_node.cpp index 323e4bf7788c5..c7f2bb5bdc60d 100644 --- a/perception/radar_tracks_msgs_converter/src/radar_tracks_msgs_converter_node/radar_tracks_msgs_converter_node.cpp +++ b/perception/radar_tracks_msgs_converter/src/radar_tracks_msgs_converter_node/radar_tracks_msgs_converter_node.cpp @@ -78,13 +78,12 @@ RadarTracksMsgsConverterNode::RadarTracksMsgsConverterNode(const rclcpp::NodeOpt std::bind(&RadarTracksMsgsConverterNode::onSetParam, this, _1)); // Node Parameter - node_param_.update_rate_hz = declare_parameter("update_rate_hz", 20.0); - node_param_.new_frame_id = declare_parameter("new_frame_id", "base_link"); - node_param_.use_twist_compensation = declare_parameter("use_twist_compensation", false); - node_param_.use_twist_yaw_compensation = - declare_parameter("use_twist_yaw_compensation", false); + node_param_.update_rate_hz = declare_parameter("update_rate_hz"); + node_param_.new_frame_id = declare_parameter("new_frame_id"); + node_param_.use_twist_compensation = declare_parameter("use_twist_compensation"); + node_param_.use_twist_yaw_compensation = declare_parameter("use_twist_yaw_compensation"); 
node_param_.static_object_speed_threshold = - declare_parameter("static_object_speed_threshold", 1.0); + declare_parameter("static_object_speed_threshold"); // Subscriber sub_radar_ = create_subscription( diff --git a/perception/simple_object_merger/src/simple_object_merger_node/simple_object_merger_node.cpp b/perception/simple_object_merger/src/simple_object_merger_node/simple_object_merger_node.cpp index 683c6921eef5b..56250af766421 100644 --- a/perception/simple_object_merger/src/simple_object_merger_node/simple_object_merger_node.cpp +++ b/perception/simple_object_merger/src/simple_object_merger_node/simple_object_merger_node.cpp @@ -80,9 +80,9 @@ SimpleObjectMergerNode::SimpleObjectMergerNode(const rclcpp::NodeOptions & node_ std::bind(&SimpleObjectMergerNode::onSetParam, this, std::placeholders::_1)); // Node Parameter - node_param_.update_rate_hz = declare_parameter("update_rate_hz", 20.0); - node_param_.new_frame_id = declare_parameter("new_frame_id", "base_link"); - node_param_.timeout_threshold = declare_parameter("timeout_threshold", 0.1); + node_param_.update_rate_hz = declare_parameter("update_rate_hz"); + node_param_.new_frame_id = declare_parameter("new_frame_id"); + node_param_.timeout_threshold = declare_parameter("timeout_threshold"); declare_parameter("input_topics", std::vector()); node_param_.topic_names = get_parameter("input_topics").as_string_array(); diff --git a/perception/tensorrt_yolo/.gitignore b/perception/tensorrt_yolo/.gitignore deleted file mode 100755 index 160e7b321280b..0000000000000 --- a/perception/tensorrt_yolo/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -data/ -calib_image/ diff --git a/perception/tensorrt_yolo/CMakeLists.txt b/perception/tensorrt_yolo/CMakeLists.txt deleted file mode 100755 index b059ab44d8ece..0000000000000 --- a/perception/tensorrt_yolo/CMakeLists.txt +++ /dev/null @@ -1,162 +0,0 @@ -cmake_minimum_required(VERSION 3.14) -project(tensorrt_yolo) - -find_package(autoware_cmake REQUIRED) -autoware_package() - -# 
TODO(tensorrt_yolo): Remove once upgrading to TensorRT 8.5 is complete -add_compile_options(-Wno-deprecated-declarations) - -option(CUDA_VERBOSE "Verbose output of CUDA modules" OFF) - -find_package(OpenCV REQUIRED) - -# set flags for CUDA availability -option(CUDA_AVAIL "CUDA available" OFF) -find_package(CUDA) -if(CUDA_FOUND) - find_library(CUBLAS_LIBRARIES cublas HINTS - ${CUDA_TOOLKIT_ROOT_DIR}/lib64 - ${CUDA_TOOLKIT_ROOT_DIR}/lib - ) - if(CUDA_VERBOSE) - message(STATUS "CUDA is available!") - message(STATUS "CUDA Libs: ${CUDA_LIBRARIES}") - message(STATUS "CUDA Headers: ${CUDA_INCLUDE_DIRS}") - endif() - set(CUDA_AVAIL ON) -else() - message("CUDA NOT FOUND") - set(CUDA_AVAIL OFF) -endif() - -# set flags for TensorRT availability -option(TRT_AVAIL "TensorRT available" OFF) -# try to find the tensorRT modules -find_library(NVINFER NAMES nvinfer) -find_library(NVONNXPARSER nvonnxparser) -find_library(NVINFER_PLUGIN NAMES nvinfer_plugin) -if(NVINFER AND NVONNXPARSER AND NVINFER_PLUGIN) - if(CUDA_VERBOSE) - message(STATUS "TensorRT is available!") - message(STATUS "NVINFER: ${NVINFER}") - message(STATUS "NVPARSERS: ${NVPARSERS}") - message(STATUS "NVINFER_PLUGIN: ${NVINFER_PLUGIN}") - message(STATUS "NVONNXPARSER: ${NVONNXPARSER}") - endif() - set(TRT_AVAIL ON) -else() - message("TensorRT is NOT Available") - set(TRT_AVAIL OFF) -endif() - -# set flags for CUDNN availability -option(CUDNN_AVAIL "CUDNN available" OFF) -# try to find the CUDNN module -find_library(CUDNN_LIBRARY -NAMES libcudnn.so${__cudnn_ver_suffix} libcudnn${__cudnn_ver_suffix}.dylib ${__cudnn_lib_win_name} -PATHS $ENV{LD_LIBRARY_PATH} ${__libpath_cudart} ${CUDNN_ROOT_DIR} ${PC_CUDNN_LIBRARY_DIRS} ${CMAKE_INSTALL_PREFIX} -PATH_SUFFIXES lib lib64 bin -DOC "CUDNN library." 
-) -if(CUDNN_LIBRARY) - if(CUDA_VERBOSE) - message(STATUS "CUDNN is available!") - message(STATUS "CUDNN_LIBRARY: ${CUDNN_LIBRARY}") - endif() - set(CUDNN_AVAIL ON) -else() - message("CUDNN is NOT Available") - set(CUDNN_AVAIL OFF) -endif() - -# create calib image directory for int8 calibration -set(CALIB_IMAGE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/calib_image") -if(NOT EXISTS "${CALIB_IMAGE_PATH}") - execute_process(COMMAND mkdir -p ${CALIB_IMAGE_PATH}) -endif() - -if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) - include_directories( - include - lib/include - ${OpenCV_INCLUDE_DIRS} - ${CUDA_INCLUDE_DIRS} - ) - - - ### yolo ### - cuda_add_library(mish_plugin SHARED - lib/src/plugins/mish.cu - lib/src/plugins/mish_plugin.cpp - ) - - cuda_add_library(yolo_layer_plugin SHARED - lib/src/plugins/yolo_layer.cu - lib/src/plugins/yolo_layer_plugin.cpp - ) - - cuda_add_library(nms_plugin SHARED - lib/src/plugins/nms.cu - lib/src/plugins/nms_plugin.cpp - ) - - ament_auto_add_library(yolo SHARED - lib/src/trt_yolo.cpp - ) - - target_include_directories(yolo PUBLIC - lib/include - ) - - target_link_libraries(yolo - ${NVINFER} - ${NVONNXPARSER} - ${NVINFER_PLUGIN} - ${CUDA_LIBRARIES} - ${CUBLAS_LIBRARIES} - ${CUDNN_LIBRARY} - mish_plugin - yolo_layer_plugin - nms_plugin - ) - - ament_auto_add_library(tensorrt_yolo_nodelet SHARED - src/nodelet.cpp - ) - - target_link_libraries(tensorrt_yolo_nodelet - ${OpenCV_LIBS} - yolo - mish_plugin - yolo_layer_plugin - nms_plugin - ) - - rclcpp_components_register_node(tensorrt_yolo_nodelet - PLUGIN "object_recognition::TensorrtYoloNodelet" - EXECUTABLE tensorrt_yolo_node - ) - - ament_auto_package(INSTALL_TO_SHARE - config - launch - ) - - install( - TARGETS - mish_plugin - yolo_layer_plugin - nms_plugin - LIBRARY DESTINATION lib - ARCHIVE DESTINATION lib - RUNTIME DESTINATION bin - ) - -else() - message("TensorrtYolo won't be built, CUDA and/or TensorRT were not found.") - ament_auto_package(INSTALL_TO_SHARE - config - launch - ) -endif() diff 
--git a/perception/tensorrt_yolo/README.md b/perception/tensorrt_yolo/README.md deleted file mode 100644 index 58d4af0dfa83d..0000000000000 --- a/perception/tensorrt_yolo/README.md +++ /dev/null @@ -1,117 +0,0 @@ -# tensorrt_yolo - -## Purpose - -This package detects 2D bounding boxes for target objects e.g., cars, trucks, bicycles, and pedestrians on a image based on YOLO(You only look once) model. - -## Inner-workings / Algorithms - -### Cite - - - -yolov3 - -Redmon, J., & Farhadi, A. (2018). Yolov3: An incremental improvement. arXiv preprint arXiv:1804.02767. - -yolov4 - -Bochkovskiy, A., Wang, C. Y., & Liao, H. Y. M. (2020). Yolov4: Optimal speed and accuracy of object detection. arXiv preprint arXiv:2004.10934. - -yolov5 - -Jocher, G., et al. (2021). ultralytics/yolov5: v6.0 - YOLOv5n 'Nano' models, Roboflow integration, TensorFlow export, OpenCV DNN support (v6.0). Zenodo. - -## Inputs / Outputs - -### Input - -| Name | Type | Description | -| ---------- | ------------------- | --------------- | -| `in/image` | `sensor_msgs/Image` | The input image | - -### Output - -| Name | Type | Description | -| ------------- | -------------------------------------------------- | -------------------------------------------------- | -| `out/objects` | `tier4_perception_msgs/DetectedObjectsWithFeature` | The detected objects with 2D bounding boxes | -| `out/image` | `sensor_msgs/Image` | The image with 2D bounding boxes for visualization | - -## Parameters - -### Core Parameters - -| Name | Type | Default Value | Description | -| ------------------- | ------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------- | -| `anchors` | double array | [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0] | The anchors to create bounding box candidates | -| `scale_x_y` 
| double array | [1.0, 1.0, 1.0] | The scale parameter to eliminate grid sensitivity | -| `score_thresh` | double | 0.1 | If the objectness score is less than this value, the object is ignored in yolo layer. | -| `iou_thresh` | double | 0.45 | The iou threshold for NMS method | -| `detections_per_im` | int | 100 | The maximum detection number for one frame | -| `use_darknet_layer` | bool | true | The flag to use yolo layer in darknet | -| `ignore_thresh` | double | 0.5 | If the output score is less than this value, ths object is ignored. | - -### Node Parameters - -| Name | Type | Default Value | Description | -| ----------------------- | ------ | ------------- | ------------------------------------------------------------------ | -| `data_path` | string | "" | Packages data and artifacts directory path | -| `onnx_file` | string | "" | The onnx file name for yolo model | -| `engine_file` | string | "" | The tensorrt engine file name for yolo model | -| `label_file` | string | "" | The label file with label names for detected objects written on it | -| `calib_image_directory` | string | "" | The directory name including calibration images for int8 inference | -| `calib_cache_file` | string | "" | The calibration cache file for int8 inference | -| `mode` | string | "FP32" | The inference mode: "FP32", "FP16", "INT8" | -| `gpu_id` | int | 0 | GPU device ID that runs the model | - -## Assumptions / Known limits - -This package includes multiple licenses. - -## Onnx model - -All YOLO ONNX models are converted from the officially trained model. If you need information about training datasets and conditions, please refer to the official repositories. - -All models are downloaded during env preparation by ansible (as mention in [installation](https://autowarefoundation.github.io/autoware-documentation/main/installation/autoware/source-installation/)). 
It is also possible to download them manually, see [Manual downloading of artifacts](https://github.com/autowarefoundation/autoware/tree/main/ansible/roles/artifacts) . When launching the node with a model for the first time, the model is automatically converted to TensorRT, although this may take some time. - -### YOLOv3 - -[YOLOv3](https://awf.ml.dev.web.auto/perception/models/yolov3.onnx "YOLOv3"): Converted from darknet [weight file](https://pjreddie.com/media/files/yolov3.weights "weight file") and [conf file](https://github.com/pjreddie/darknet/blob/master/cfg/yolov3.cfg "conf file"). - -- [This code](https://github.com/wep21/yolo_onnx_converter) is used for converting darknet weight file and conf file to onnx. - -### YOLOv4 - -[YOLOv4](https://awf.ml.dev.web.auto/perception/models/yolov4.onnx "YOLOv4"): Converted from darknet [weight file](https://github.com/AlexeyAB/darknet/releases/download/darknet_yolo_v3_optimal/yolov4.weights "weight file") and [conf file](https://github.com/AlexeyAB/darknet/blob/master/cfg/yolov4.cfg "conf file"). - -[YOLOv4-tiny](https://awf.ml.dev.web.auto/perception/models/yolov4-tiny.onnx "YOLOv4-tiny"): Converted from darknet [weight file](https://github.com/AlexeyAB/darknet/releases/download/darknet_yolo_v4_pre/yolov4-tiny.weights "weight file") and [conf file](https://github.com/AlexeyAB/darknet/blob/master/cfg/yolov4-tiny.cfg "conf file"). - -- [This code](https://github.com/wep21/yolo_onnx_converter) is used for converting darknet weight file and conf file to onnx. 
- -### YOLOv5 - -Refer to [this guide](https://github.com/ultralytics/yolov5/issues/251 "guide") - -- [YOLOv5s](https://awf.ml.dev.web.auto/perception/models/yolov5s.onnx "YOLOv5s") - -- [YOLOv5m](https://awf.ml.dev.web.auto/perception/models/yolov5m.onnx "YOLOv5m") - -- [YOLOv5l](https://awf.ml.dev.web.auto/perception/models/yolov5l.onnx "YOLOv5l") - -- [YOLOv5x](https://awf.ml.dev.web.auto/perception/models/yolov5x.onnx "YOLOv5x") - -## Limitations - -- If you want to run multiple instances of this node for multiple cameras using "yolo.launch.xml", first of all, create a TensorRT engine by running the "tensorrt_yolo.launch.xml" launch file separately for each GPU. Otherwise, multiple instances of the node trying to create the same TensorRT engine can cause potential problems. - -## Reference repositories - -- - -- - -- - -- - -- diff --git a/perception/tensorrt_yolo/config/yolov3.param.yaml b/perception/tensorrt_yolo/config/yolov3.param.yaml deleted file mode 100644 index d97505dae5020..0000000000000 --- a/perception/tensorrt_yolo/config/yolov3.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [116.0, 90.0, 156.0, 198.0, 373.0, 326.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 10.0, 13.0, 16.0, 30.0, 33.0, 23.0] - scale_x_y: [1.0, 1.0, 1.0] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: true - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/config/yolov4-tiny.param.yaml b/perception/tensorrt_yolo/config/yolov4-tiny.param.yaml deleted file mode 100644 index 8071da9dcac06..0000000000000 --- a/perception/tensorrt_yolo/config/yolov4-tiny.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [81.0, 82.0, 135.0, 169.0, 344.0, 319.0, 23.0, 27.0, 37.0, 58.0, 81.0, 82.0] - scale_x_y: [1.05, 1.05] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: true - ignore_thresh: 0.5 diff --git 
a/perception/tensorrt_yolo/config/yolov4.param.yaml b/perception/tensorrt_yolo/config/yolov4.param.yaml deleted file mode 100644 index 9edc0fc6ce708..0000000000000 --- a/perception/tensorrt_yolo/config/yolov4.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [12.0, 16.0, 19.0, 36.0, 40.0, 28.0, 36.0, 75.0, 76.0, 55.0, 72.0, 146.0, 142.0, 110.0, 192.0, 243.0, 459.0, 401.0] - scale_x_y: [1.2, 1.1, 1.05] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: true - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/config/yolov5l.param.yaml b/perception/tensorrt_yolo/config/yolov5l.param.yaml deleted file mode 100644 index 67ae862fc33af..0000000000000 --- a/perception/tensorrt_yolo/config/yolov5l.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0] - scale_x_y: [1.0, 1.0, 1.0] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: false - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/config/yolov5m.param.yaml b/perception/tensorrt_yolo/config/yolov5m.param.yaml deleted file mode 100644 index 67ae862fc33af..0000000000000 --- a/perception/tensorrt_yolo/config/yolov5m.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0] - scale_x_y: [1.0, 1.0, 1.0] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: false - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/config/yolov5s.param.yaml b/perception/tensorrt_yolo/config/yolov5s.param.yaml deleted file mode 100644 index 67ae862fc33af..0000000000000 --- a/perception/tensorrt_yolo/config/yolov5s.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: 
- num_anchors: 3 - anchors: [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0] - scale_x_y: [1.0, 1.0, 1.0] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: false - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/config/yolov5x.param.yaml b/perception/tensorrt_yolo/config/yolov5x.param.yaml deleted file mode 100644 index 67ae862fc33af..0000000000000 --- a/perception/tensorrt_yolo/config/yolov5x.param.yaml +++ /dev/null @@ -1,10 +0,0 @@ -/**: - ros__parameters: - num_anchors: 3 - anchors: [10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, 156.0, 198.0, 373.0, 326.0] - scale_x_y: [1.0, 1.0, 1.0] - score_thresh: 0.1 - iou_thresh: 0.45 - detections_per_im: 100 - use_darknet_layer: false - ignore_thresh: 0.5 diff --git a/perception/tensorrt_yolo/include/tensorrt_yolo/nodelet.hpp b/perception/tensorrt_yolo/include/tensorrt_yolo/nodelet.hpp deleted file mode 100644 index aea3142b1f35e..0000000000000 --- a/perception/tensorrt_yolo/include/tensorrt_yolo/nodelet.hpp +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef TENSORRT_YOLO__NODELET_HPP_ -#define TENSORRT_YOLO__NODELET_HPP_ - -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#if __has_include() -#include -#else -#include -#endif - -#include -#include -#include -#include -#include -#include - -namespace object_recognition -{ -class TensorrtYoloNodelet : public rclcpp::Node -{ -public: - explicit TensorrtYoloNodelet(const rclcpp::NodeOptions & options); - void connectCb(); - void callback(const sensor_msgs::msg::Image::ConstSharedPtr image_msg); - bool readLabelFile(const std::string & filepath, std::vector * labels); - -private: - std::mutex connect_mutex_; - - image_transport::Publisher image_pub_; - rclcpp::Publisher::SharedPtr objects_pub_; - - image_transport::Subscriber image_sub_; - - rclcpp::TimerBase::SharedPtr timer_; - - yolo::Config yolo_config_; - - int gpu_device_id_; - - std::vector labels_; - std::unique_ptr out_scores_; - std::unique_ptr out_boxes_; - std::unique_ptr out_classes_; - std::unique_ptr net_ptr_; -}; - -} // namespace object_recognition - -#endif // TENSORRT_YOLO__NODELET_HPP_ diff --git a/perception/tensorrt_yolo/launch/tensorrt_yolo.launch.xml b/perception/tensorrt_yolo/launch/tensorrt_yolo.launch.xml deleted file mode 100755 index b2656de0ab72e..0000000000000 --- a/perception/tensorrt_yolo/launch/tensorrt_yolo.launch.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/perception/tensorrt_yolo/lib/include/calibrator.hpp b/perception/tensorrt_yolo/lib/include/calibrator.hpp deleted file mode 100644 index edfbc126d645a..0000000000000 --- a/perception/tensorrt_yolo/lib/include/calibrator.hpp +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#ifndef CALIBRATOR_HPP_ -#define CALIBRATOR_HPP_ - -#include -#include -#include -#include - -#include -#include - -#include -#include -#include -#include -#include - -namespace yolo -{ -class ImageStream -{ -public: - ImageStream( - int batch_size, nvinfer1::Dims input_dims, const std::vector calibration_images) - : batch_size_(batch_size), - calibration_images_(calibration_images), - current_batch_(0), - max_batches_(calibration_images.size() / batch_size_), - input_dims_(input_dims) - { - batch_.resize(batch_size_ * input_dims_.d[1] * input_dims_.d[2] * input_dims_.d[3]); - } - - int getBatchSize() const { return batch_size_; } - - int getMaxBatches() const { return max_batches_; } - - float * getBatch() { return &batch_[0]; } - - nvinfer1::Dims getInputDims() { return input_dims_; } - - std::vector preprocess(const cv::Mat & in_img, const int c, const int w, const int h) const - { - cv::Mat rgb; - cv::cvtColor(in_img, rgb, cv::COLOR_BGR2RGB); - - cv::resize(rgb, rgb, cv::Size(w, h)); - - cv::Mat img_float; - rgb.convertTo(img_float, CV_32FC3, 1 / 255.0); - - // HWC TO CHW - std::vector input_channels(c); - cv::split(img_float, input_channels); - - std::vector result(h * w * c); - auto data = result.data(); - int channel_length = h * w; - for (int i = 0; i < c; ++i) { - memcpy(data, input_channels[i].data, channel_length * sizeof(float)); - data += channel_length; - } - - return result; - } - - bool next() - { - if (current_batch_ == max_batches_) { - return false; - } - - for (int i = 0; i < batch_size_; ++i) { - auto image = - cv::imread(calibration_images_[batch_size_ * current_batch_ + i].c_str(), cv::IMREAD_COLOR); - auto input = preprocess(image, input_dims_.d[1], input_dims_.d[3], input_dims_.d[2]); - batch_.insert( - batch_.begin() + i * input_dims_.d[1] * input_dims_.d[2] * input_dims_.d[3], input.begin(), - input.end()); - } - - ++current_batch_; - return true; - } - - void reset() { current_batch_ = 0; } - -private: - int batch_size_; - 
std::vector calibration_images_; - int current_batch_; - int max_batches_; - nvinfer1::Dims input_dims_; - - std::vector batch_; -}; - -class Int8EntropyCalibrator : public nvinfer1::IInt8EntropyCalibrator2 -{ -public: - Int8EntropyCalibrator( - ImageStream & stream, const std::string calibration_cache_file, bool read_cache = true) - : stream_(stream), calibration_cache_file_(calibration_cache_file), read_cache_(read_cache) - { - auto d = stream_.getInputDims(); - input_count_ = stream_.getBatchSize() * d.d[1] * d.d[2] * d.d[3]; - CHECK_CUDA_ERROR(cudaMalloc(&device_input_, input_count_ * sizeof(float))); - } - - int getBatchSize() const noexcept override { return stream_.getBatchSize(); } - - virtual ~Int8EntropyCalibrator() { CHECK_CUDA_ERROR(cudaFree(device_input_)); } - - bool getBatch(void * bindings[], const char * names[], int nb_bindings) noexcept override - { - (void)names; - (void)nb_bindings; - - if (!stream_.next()) { - return false; - } - - try { - CHECK_CUDA_ERROR(cudaMemcpy( - device_input_, stream_.getBatch(), input_count_ * sizeof(float), cudaMemcpyHostToDevice)); - } catch (const std::exception & e) { - // Do nothing - } - bindings[0] = device_input_; - return true; - } - - const void * readCalibrationCache(size_t & length) noexcept override - { - calib_cache_.clear(); - std::ifstream input(calibration_cache_file_, std::ios::binary); - input >> std::noskipws; - if (read_cache_ && input.good()) { - std::copy( - std::istream_iterator(input), std::istream_iterator(), - std::back_inserter(calib_cache_)); - } - - length = calib_cache_.size(); - return length ? 
&calib_cache_[0] : nullptr; - } - - void writeCalibrationCache(const void * cache, size_t length) noexcept override - { - std::ofstream output(calibration_cache_file_, std::ios::binary); - output.write(reinterpret_cast(cache), length); - } - -private: - ImageStream stream_; - const std::string calibration_cache_file_; - bool read_cache_{true}; - size_t input_count_; - void * device_input_{nullptr}; - std::vector calib_cache_; -}; -} // namespace yolo - -#endif // CALIBRATOR_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/cuda_utils.hpp b/perception/tensorrt_yolo/lib/include/cuda_utils.hpp deleted file mode 100644 index 8a8a21d99e2cb..0000000000000 --- a/perception/tensorrt_yolo/lib/include/cuda_utils.hpp +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
- * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ - -/* - * This code is licensed under CC0 1.0 Universal (Public Domain). - * You can use this without any limitation. 
- * https://creativecommons.org/publicdomain/zero/1.0/deed.en - */ - -#ifndef CUDA_UTILS_HPP_ -#define CUDA_UTILS_HPP_ - -#include - -#include -#include -#include -#include - -#define CHECK_CUDA_ERROR(e) (cuda::check_error(e, __FILE__, __LINE__)) - -namespace cuda -{ -void check_error(const ::cudaError_t e, const char * f, int n) -{ - if (e != ::cudaSuccess) { - ::std::stringstream s; - s << ::cudaGetErrorName(e) << " (" << e << ")@" << f << "#L" << n << ": " - << ::cudaGetErrorString(e); - throw ::std::runtime_error{s.str()}; - } -} - -struct deleter -{ - void operator()(void * p) const { CHECK_CUDA_ERROR(::cudaFree(p)); } -}; -template -using unique_ptr = ::std::unique_ptr; - -template -typename ::std::enable_if<::std::is_array::value, cuda::unique_ptr>::type make_unique( - const ::std::size_t n) -{ - using U = typename ::std::remove_extent::type; - U * p; - CHECK_CUDA_ERROR(::cudaMalloc(reinterpret_cast(&p), sizeof(U) * n)); - return cuda::unique_ptr{p}; -} - -template -cuda::unique_ptr make_unique() -{ - T * p; - CHECK_CUDA_ERROR(::cudaMalloc(reinterpret_cast(&p), sizeof(T))); - return cuda::unique_ptr{p}; -} - -constexpr size_t CUDA_ALIGN = 256; - -template -inline size_t get_size_aligned(size_t num_elem) -{ - size_t size = num_elem * sizeof(T); - size_t extra_align = 0; - if (size % CUDA_ALIGN != 0) { - extra_align = CUDA_ALIGN - size % CUDA_ALIGN; - } - return size + extra_align; -} - -template -inline T * get_next_ptr(size_t num_elem, void *& workspace, size_t & workspace_size) -{ - size_t size = get_size_aligned(num_elem); - if (size > workspace_size) { - throw ::std::runtime_error("Workspace is too small!"); - } - workspace_size -= size; - T * ptr = reinterpret_cast(workspace); - workspace = reinterpret_cast(reinterpret_cast(workspace) + size); - return ptr; -} - -} // namespace cuda - -#endif // CUDA_UTILS_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/mish.hpp b/perception/tensorrt_yolo/lib/include/mish.hpp deleted file mode 100644 index 
566cd0203ff41..0000000000000 --- a/perception/tensorrt_yolo/lib/include/mish.hpp +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -#ifndef MISH_HPP_ -#define MISH_HPP_ - -#include - -#include - -namespace yolo -{ -int mish(cudaStream_t stream, const float * input, float * output, int n); -} // namespace yolo - -#endif // MISH_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/mish_plugin.hpp b/perception/tensorrt_yolo/lib/include/mish_plugin.hpp deleted file mode 100644 index e49d2d2ba5ac9..0000000000000 --- a/perception/tensorrt_yolo/lib/include/mish_plugin.hpp +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -#ifndef MISH_PLUGIN_HPP_ -#define MISH_PLUGIN_HPP_ - -#include - -#include -#include -#include - -namespace yolo -{ -class MishPlugin : public nvinfer1::IPluginV2DynamicExt -{ -public: - MishPlugin(); - MishPlugin(const void * data, size_t length); - - // IPluginV2 methods - const char * getPluginType() const noexcept override; - const char * getPluginVersion() const noexcept override; - int getNbOutputs() const noexcept override; - int initialize() noexcept override; - void terminate() noexcept override; - size_t getSerializationSize() const noexcept override; - void serialize(void * buffer) const noexcept override; - void destroy() noexcept override; - void setPluginNamespace(const char * libNamespace) noexcept override; - const char * getPluginNamespace() const noexcept override; - - // IPluginV2Ext methods - nvinfer1::DataType getOutputDataType( - int index, const nvinfer1::DataType * inputType, int nbInputs) const noexcept override; - - // IPluginV2DynamicExt methods - nvinfer1::IPluginV2DynamicExt * clone() const noexcept override; - nvinfer1::DimsExprs getOutputDimensions( - int outputIndex, const nvinfer1::DimsExprs * inputs, int nbInputs, - nvinfer1::IExprBuilder & exprBuilder) noexcept override; - bool supportsFormatCombination( - int pos, const nvinfer1::PluginTensorDesc * inOut, int nbInputs, - int nbOutputs) noexcept override; - void configurePlugin( - const nvinfer1::DynamicPluginTensorDesc * in, int nbInputs, - const nvinfer1::DynamicPluginTensorDesc * out, int nbOutputs) noexcept override; - size_t getWorkspaceSize( - const nvinfer1::PluginTensorDesc * inputs, int nbInputs, - const nvinfer1::PluginTensorDesc * outputs, int nbOutputs) const noexcept override; - int 
enqueue( - const nvinfer1::PluginTensorDesc * inputDesc, const nvinfer1::PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept override; - -private: - const char * mPluginNamespace; -}; - -class MishPluginCreator : public nvinfer1::IPluginCreator -{ -public: - MishPluginCreator(); - - ~MishPluginCreator() override = default; - - const char * getPluginName() const noexcept override; - - const char * getPluginVersion() const noexcept override; - - const nvinfer1::PluginFieldCollection * getFieldNames() noexcept override; - - nvinfer1::IPluginV2DynamicExt * createPlugin( - const char * name, const nvinfer1::PluginFieldCollection * fc) noexcept override; - - nvinfer1::IPluginV2DynamicExt * deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept override; - - void setPluginNamespace(const char * libNamespace) noexcept override - { - mNamespace = libNamespace; - } - - const char * getPluginNamespace() const noexcept override { return mNamespace.c_str(); } - -private: - std::string mNamespace; - static nvinfer1::PluginFieldCollection mFC; - static std::vector mPluginAttributes; -}; - -REGISTER_TENSORRT_PLUGIN(MishPluginCreator); - -} // namespace yolo - -#endif // MISH_PLUGIN_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/nms.hpp b/perception/tensorrt_yolo/lib/include/nms.hpp deleted file mode 100644 index 1070c50f35807..0000000000000 --- a/perception/tensorrt_yolo/lib/include/nms.hpp +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#ifndef NMS_HPP_ -#define NMS_HPP_ - -#include - -#include - -namespace yolo -{ -int nms( - int batchSize, const void * const * inputs, void * const * outputs, size_t count, - int detections_per_im, float nms_thresh, void * workspace, size_t workspace_size, - cudaStream_t stream); -} - -#endif // NMS_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/nms_plugin.hpp b/perception/tensorrt_yolo/lib/include/nms_plugin.hpp deleted file mode 100644 index 58bc49e57adf4..0000000000000 --- a/perception/tensorrt_yolo/lib/include/nms_plugin.hpp +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ - -#ifndef NMS_PLUGIN_HPP_ -#define NMS_PLUGIN_HPP_ - -#include - -#include -#include - -namespace yolo -{ -class NMSPlugin : public nvinfer1::IPluginV2DynamicExt -{ -public: - NMSPlugin(float nms_thresh, int detections_per_im); - NMSPlugin(float nms_thresh, int detections_per_im, size_t count); - NMSPlugin(const void * data, size_t length); - - // IPluginV2 methods - const char * getPluginType() const noexcept override; - const char * getPluginVersion() const noexcept override; - int getNbOutputs() const noexcept override; - int initialize() noexcept override; - void terminate() noexcept override; - size_t getSerializationSize() const noexcept override; - void serialize(void * buffer) const noexcept override; - void destroy() noexcept override; - void setPluginNamespace(const char * libNamespace) noexcept override; - const char * getPluginNamespace() const noexcept override; - - // IPluginV2Ext methods - nvinfer1::DataType getOutputDataType( - int index, const nvinfer1::DataType * inputType, int nbInputs) const noexcept override; - - // IPluginV2DynamicExt methods - nvinfer1::IPluginV2DynamicExt * clone() const noexcept override; - nvinfer1::DimsExprs getOutputDimensions( - int outputIndex, const nvinfer1::DimsExprs * inputs, int nbInputs, - nvinfer1::IExprBuilder & exprBuilder) noexcept override; - bool supportsFormatCombination( - int pos, const nvinfer1::PluginTensorDesc * inOut, int nbInputs, - int nbOutputs) noexcept override; - void configurePlugin( - const 
nvinfer1::DynamicPluginTensorDesc * in, int nbInputs, - const nvinfer1::DynamicPluginTensorDesc * out, int nbOutputs) noexcept override; - size_t getWorkspaceSize( - const nvinfer1::PluginTensorDesc * inputs, int nbInputs, - const nvinfer1::PluginTensorDesc * outputs, int nbOutputs) const noexcept override; - int enqueue( - const nvinfer1::PluginTensorDesc * inputDesc, const nvinfer1::PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept override; - -private: - float nms_thresh_; - int detections_per_im_; - - size_t count_; - mutable int size = -1; -}; - -class NMSPluginCreator : public nvinfer1::IPluginCreator -{ -public: - NMSPluginCreator(); - - const char * getPluginName() const noexcept override; - - const char * getPluginVersion() const noexcept override; - - const nvinfer1::PluginFieldCollection * getFieldNames() noexcept override; - - nvinfer1::IPluginV2DynamicExt * createPlugin( - const char * name, const nvinfer1::PluginFieldCollection * fc) noexcept override; - - nvinfer1::IPluginV2DynamicExt * deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept override; - - void setPluginNamespace(const char * libNamespace) noexcept override; - - const char * getPluginNamespace() const noexcept override; -}; - -REGISTER_TENSORRT_PLUGIN(NMSPluginCreator); - -} // namespace yolo - -#endif // NMS_PLUGIN_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/trt_yolo.hpp b/perception/tensorrt_yolo/lib/include/trt_yolo.hpp deleted file mode 100644 index a64c94c008526..0000000000000 --- a/perception/tensorrt_yolo/lib/include/trt_yolo.hpp +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#ifndef TRT_YOLO_HPP_ -#define TRT_YOLO_HPP_ - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include -#include - -namespace yolo -{ -struct Deleter -{ - template - void operator()(T * obj) const - { - if (obj) { - delete obj; - } - } -}; - -template -using unique_ptr = std::unique_ptr; - -struct Config -{ - int num_anchors; - std::vector anchors; - std::vector scale_x_y; - float score_thresh; - float iou_thresh; - int detections_per_im; - bool use_darknet_layer; - float ignore_thresh; -}; - -class Net -{ -public: - // Create engine from engine path - explicit Net(const std::string & engine_path, bool verbose = false); - - // Create engine from serialized onnx model - Net( - const std::string & onnx_file_path, const std::string & precision, const int max_batch_size, - const Config & yolo_config, const std::vector & calibration_images, - const std::string & calibration_table, bool verbose = true, - size_t workspace_size = (1ULL << 30)); - - ~Net(); - - // Save model to path - void save(const std::string & path) const; - - bool detect(const cv::Mat & in_img, float * out_scores, float * out_boxes, float * out_classes); - - // Get (c, h, w) size of the fixed input - std::vector getInputDims() const; - - std::vector getOutputScoreSize() const; - - // Get max allowed batch size - int getMaxBatchSize() const; - - // Get max number of detections - int getMaxDetections() const; - - int getInputSize() const; - -private: - unique_ptr runtime_ = nullptr; - unique_ptr plan_ = nullptr; - unique_ptr engine_ = nullptr; - unique_ptr context_ = nullptr; - cudaStream_t stream_ = nullptr; - cuda::unique_ptr input_d_ = nullptr; - cuda::unique_ptr out_scores_d_ = nullptr; - cuda::unique_ptr out_boxes_d_ = nullptr; - cuda::unique_ptr out_classes_d_ = nullptr; - tensorrt_common::Logger logger_; - - void load(const std::string & path); - bool prepare(); - std::vector preprocess( - const cv::Mat & in_img, const int c, const 
int h, const int w) const; - // Infer using pre-allocated GPU buffers {data, scores, boxes} - void infer(std::vector & buffers, const int batch_size); -}; - -bool set_cuda_device(int gpu_id) -{ - cudaError_t status = cudaSetDevice(gpu_id); - if (status != cudaSuccess) { - return false; - } else { - return true; - } -} - -} // namespace yolo - -#endif // TRT_YOLO_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/yolo_layer.hpp b/perception/tensorrt_yolo/lib/include/yolo_layer.hpp deleted file mode 100644 index 62dc601185140..0000000000000 --- a/perception/tensorrt_yolo/lib/include/yolo_layer.hpp +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. 
- - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#ifndef YOLO_LAYER_HPP_ -#define YOLO_LAYER_HPP_ - -#include - -#include - -namespace yolo -{ -int yoloLayer( - int batch_size, const void * const * inputs, void * const * outputs, int grid_width, - int grid_height, int num_classes, int num_anchors, const std::vector & anchors, - int input_width, int input_height, float scale_xy, float score_thresh, bool use_darknet_layer, - void * workspace, size_t workspace_size, cudaStream_t stream); -} // namespace yolo - -#endif // YOLO_LAYER_HPP_ diff --git a/perception/tensorrt_yolo/lib/include/yolo_layer_plugin.hpp b/perception/tensorrt_yolo/lib/include/yolo_layer_plugin.hpp deleted file mode 100644 index a7e10e8a6351d..0000000000000 --- a/perception/tensorrt_yolo/lib/include/yolo_layer_plugin.hpp +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ - -#ifndef YOLO_LAYER_PLUGIN_HPP_ -#define YOLO_LAYER_PLUGIN_HPP_ - -#include - -#include - -#include -#include -#include -#include - -namespace yolo -{ -class YoloLayerPlugin : public nvinfer1::IPluginV2DynamicExt -{ -public: - explicit YoloLayerPlugin( - int width, int height, int num_classes, std::vector & anchors, float scale_xy, - float score_thresh, bool use_darknet_layer); - YoloLayerPlugin(const void * data, size_t length); - - // IPluginV2 methods - const char * getPluginType() const noexcept override; - const char * getPluginVersion() const noexcept override; - int getNbOutputs() const noexcept override; - int initialize() noexcept override; - void terminate() noexcept override; - size_t getSerializationSize() const noexcept override; - void serialize(void * buffer) const noexcept override; - void destroy() noexcept override; - void setPluginNamespace(const char * libNamespace) noexcept override; - const char * getPluginNamespace() const noexcept override; - - // IPluginV2Ext methods - nvinfer1::DataType getOutputDataType( - int index, const nvinfer1::DataType * inputType, int nbInputs) const noexcept override; - - // IPluginV2DynamicExt methods - nvinfer1::IPluginV2DynamicExt * clone() const noexcept override; - nvinfer1::DimsExprs getOutputDimensions( - int outputIndex, const nvinfer1::DimsExprs * inputs, int nbInputs, - nvinfer1::IExprBuilder & exprBuilder) noexcept override; - bool supportsFormatCombination( - int pos, const nvinfer1::PluginTensorDesc * inOut, 
int nbInputs, - int nbOutputs) noexcept override; - void configurePlugin( - const nvinfer1::DynamicPluginTensorDesc * in, int nbInputs, - const nvinfer1::DynamicPluginTensorDesc * out, int nbOutputs) noexcept override; - size_t getWorkspaceSize( - const nvinfer1::PluginTensorDesc * inputs, int nbInputs, - const nvinfer1::PluginTensorDesc * outputs, int nbOutputs) const noexcept override; - int enqueue( - const nvinfer1::PluginTensorDesc * inputDesc, const nvinfer1::PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept override; - -private: - int width_; - int height_; - int num_anchors_; - std::vector anchors_; - float scale_x_y_; - float score_thresh_; - bool use_darknet_layer_; - mutable int size = -1; -}; - -class YoloLayerPluginCreator : public nvinfer1::IPluginCreator -{ -public: - YoloLayerPluginCreator(); - - const char * getPluginName() const noexcept override; - - const char * getPluginVersion() const noexcept override; - - const nvinfer1::PluginFieldCollection * getFieldNames() noexcept override; - - nvinfer1::IPluginV2DynamicExt * createPlugin( - const char * name, const nvinfer1::PluginFieldCollection * fc) noexcept override; - - nvinfer1::IPluginV2DynamicExt * deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept override; - - void setPluginNamespace(const char * libNamespace) noexcept override; - - const char * getPluginNamespace() const noexcept override; -}; - -REGISTER_TENSORRT_PLUGIN(YoloLayerPluginCreator); - -} // namespace yolo - -#endif // YOLO_LAYER_PLUGIN_HPP_ diff --git a/perception/tensorrt_yolo/lib/src/plugins/mish.cu b/perception/tensorrt_yolo/lib/src/plugins/mish.cu deleted file mode 100644 index 3f2eda9a984cf..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/mish.cu +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2020 Tier IV, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -#include - -#include -#include - -namespace yolo -{ -__device__ float mish(float x) -{ - float e = __expf(x); - float n = e * e + 2 * e; - if (x <= -0.6f) return x * __fdividef(n, n + 2); - - return x - 2 * __fdividef(x, n + 2); -} -template -__global__ void mishKernel(const T * input, T * output, int num_elem) -{ - int idx = threadIdx.x + TPB * blockIdx.x; - if (idx >= num_elem) return; - output[idx] = mish(input[idx]); -} - -int mish(cudaStream_t stream, const float * input, float * output, int n) -{ - constexpr int blockSize = 256; - const int gridSize = (n + blockSize - 1) / blockSize; - mishKernel<<>>(input, output, n); - return 0; -} - -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/plugins/mish_plugin.cpp b/perception/tensorrt_yolo/lib/src/plugins/mish_plugin.cpp deleted file mode 100644 index fe8688ed946e8..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/mish_plugin.cpp +++ /dev/null @@ -1,272 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -#include -#include - -#include - -#include -#include -#include -#include - -using nvinfer1::DataType; -using nvinfer1::Dims; -using nvinfer1::DimsExprs; -using nvinfer1::DynamicPluginTensorDesc; -using nvinfer1::IExprBuilder; -using nvinfer1::IPluginV2DynamicExt; -using nvinfer1::PluginField; -using nvinfer1::PluginFieldCollection; -using nvinfer1::PluginFormat; -using nvinfer1::PluginTensorDesc; - -namespace -{ -const char * MISH_PLUGIN_VERSION{"1"}; -const char * MISH_PLUGIN_NAME{"Mish_TRT"}; - -inline int64_t volume(const Dims & d) -{ - int64_t v = 1; - for (int64_t i = 0; i < d.nbDims; i++) { - v *= d.d[i]; - } - return v; -} -} // namespace - -namespace yolo -{ -MishPlugin::MishPlugin() -{ -} - -// create the plugin at runtime from a byte stream -MishPlugin::MishPlugin(const void * data, size_t length) -{ - (void)data; - (void)length; -} - -// IPluginV2 Methods - -const char * MishPlugin::getPluginType() const noexcept -{ - return MISH_PLUGIN_NAME; -} - -const char * MishPlugin::getPluginVersion() const noexcept -{ - return MISH_PLUGIN_VERSION; -} - -int MishPlugin::getNbOutputs() const noexcept -{ - return 1; -} - -int MishPlugin::initialize() noexcept -{ - return 0; -} - -void MishPlugin::terminate() noexcept -{ -} - -size_t MishPlugin::getSerializationSize() const noexcept -{ - return 0; -} - -void MishPlugin::serialize(void * buffer) const noexcept -{ - (void)buffer; -} - -void MishPlugin::destroy() noexcept -{ - delete this; -} - -void MishPlugin::setPluginNamespace(const char * pluginNamespace) noexcept -{ - mPluginNamespace = pluginNamespace; -} - -const char * MishPlugin::getPluginNamespace() const noexcept -{ - return mPluginNamespace; -} - -// IPluginV2Ext Methods - -DataType MishPlugin::getOutputDataType( - int index, const DataType * inputTypes, int nbInputs) const noexcept -{ - (void)index; - (void)inputTypes; - (void)nbInputs; - - assert(inputTypes[0] == DataType::kFLOAT); - return inputTypes[0]; -} - -// IPluginV2DynamicExt Methods - 
-IPluginV2DynamicExt * MishPlugin::clone() const noexcept -{ - auto plugin = new MishPlugin(*this); - plugin->setPluginNamespace(mPluginNamespace); - return plugin; -} - -DimsExprs MishPlugin::getOutputDimensions( - int outputIndex, const DimsExprs * inputs, int nbInputs, IExprBuilder & exprBuilder) noexcept -{ - (void)outputIndex; - (void)nbInputs; - (void)exprBuilder; - - return inputs[0]; -} - -bool MishPlugin::supportsFormatCombination( - int pos, const PluginTensorDesc * inOut, int nbInputs, int nbOutputs) noexcept -{ - (void)nbInputs; - (void)nbOutputs; - - return inOut[pos].type == DataType::kFLOAT && inOut[pos].format == PluginFormat::kLINEAR; -} - -void MishPlugin::configurePlugin( - const DynamicPluginTensorDesc * in, int nbInput, const DynamicPluginTensorDesc * out, - int nbOutput) noexcept -{ - (void)in; - (void)nbInput; - (void)out; - (void)nbOutput; - - assert(nbInput == 1); - assert(nbOutput == 1); -} - -size_t MishPlugin::getWorkspaceSize( - const PluginTensorDesc * inputs, int nbInputs, const PluginTensorDesc * outputs, - int nbOutputs) const noexcept -{ - (void)inputs; - (void)nbInputs; - (void)outputs; - (void)nbOutputs; - - return 0; -} - -int MishPlugin::enqueue( - const PluginTensorDesc * inputDesc, const PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept -{ - (void)inputDesc; - (void)outputDesc; - (void)workspace; - - const int input_volume = volume(inputDesc[0].dims); - - int status = -1; - - const float * input = static_cast(inputs[0]); - float * output = static_cast(outputs[0]); - status = mish(stream, input, output, input_volume); - return status; -} - -PluginFieldCollection MishPluginCreator::mFC{}; -std::vector MishPluginCreator::mPluginAttributes; - -MishPluginCreator::MishPluginCreator() -{ - mPluginAttributes.clear(); - - mFC.nbFields = mPluginAttributes.size(); - mFC.fields = mPluginAttributes.data(); -} - -const char * 
MishPluginCreator::getPluginName() const noexcept -{ - return MISH_PLUGIN_NAME; -} - -const char * MishPluginCreator::getPluginVersion() const noexcept -{ - return MISH_PLUGIN_VERSION; -} - -const PluginFieldCollection * MishPluginCreator::getFieldNames() noexcept -{ - return &mFC; -} - -IPluginV2DynamicExt * MishPluginCreator::createPlugin( - const char * name, const PluginFieldCollection * fc) noexcept -{ - (void)name; - (void)fc; - - MishPlugin * obj = new MishPlugin(); - obj->setPluginNamespace(mNamespace.c_str()); - return obj; -} - -IPluginV2DynamicExt * MishPluginCreator::deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept -{ - (void)name; - - // This object will be deleted when the network is destroyed, which will - // call MishPlugin::destroy() - MishPlugin * obj = new MishPlugin(serialData, serialLength); - obj->setPluginNamespace(mNamespace.c_str()); - return obj; -} -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/plugins/nms.cu b/perception/tensorrt_yolo/lib/src/plugins/nms.cu deleted file mode 100644 index d73ab59ff4a7d..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/nms.cu +++ /dev/null @@ -1,188 +0,0 @@ -/// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
- * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include - -#if CUDART_VERSION >= 11000 -#include -#include -#else -#include -#include -using namespace thrust::cuda_cub; -#endif - -#include -#include - -namespace yolo -{ -__global__ void nms_kernel( - const int num_per_thread, const float threshold, const int num_detections, const int * indices, - float * scores, const float * classes, const float4 * boxes) -{ - // Go through detections by descending score - for (int m = 0; m < num_detections; m++) { - for (int n = 0; n < num_per_thread; n++) { - int i = threadIdx.x * num_per_thread + n; - if (i < num_detections && m < i && scores[m] > 0.0f) { - int idx = indices[i]; - int max_idx = indices[m]; - int icls = classes[idx]; - int mcls = classes[max_idx]; - if (mcls == icls) { - float4 ibox = boxes[idx]; - float4 mbox = boxes[max_idx]; - float x1 = max(ibox.x, mbox.x); - float y1 = max(ibox.y, mbox.y); - float x2 = min(ibox.x + ibox.z, mbox.x + mbox.z); - float y2 = min(ibox.y + ibox.w, ibox.y + mbox.w); - float w = max(0.0f, x2 - x1); - float h = max(0.0f, y2 - y1); - float iarea = ibox.z * ibox.w; - float marea = mbox.z * mbox.w; - float inter = w * h; - float overlap = inter / (iarea + marea - inter); - if (overlap > threshold) { - scores[i] = 0.0f; - } - } - } - } - - // Sync discarded detections - __syncthreads(); - } -} - -int nms( - int batch_size, const void * const * inputs, void * const * outputs, size_t count, - int detections_per_im, float nms_thresh, void * workspace, size_t workspace_size, - cudaStream_t stream) -{ - if (!workspace || !workspace_size) { - // Return required scratch space size cub style - workspace_size = cuda::get_size_aligned(count); // flags - workspace_size += cuda::get_size_aligned(count); // indices - workspace_size += cuda::get_size_aligned(count); // indices_sorted - workspace_size += cuda::get_size_aligned(count); // scores - workspace_size += 
cuda::get_size_aligned(count); // scores_sorted - - size_t temp_size_flag = 0; - cub::DeviceSelect::Flagged( - (void *)nullptr, temp_size_flag, cub::CountingInputIterator(count), (bool *)nullptr, - (int *)nullptr, (int *)nullptr, count); - size_t temp_size_sort = 0; - cub::DeviceRadixSort::SortPairsDescending( - (void *)nullptr, temp_size_sort, (float *)nullptr, (float *)nullptr, (int *)nullptr, - (int *)nullptr, count); - workspace_size += std::max(temp_size_flag, temp_size_sort); - - return workspace_size; - } - - auto on_stream = thrust::cuda::par.on(stream); - - auto flags = cuda::get_next_ptr(count, workspace, workspace_size); - auto indices = cuda::get_next_ptr(count, workspace, workspace_size); - auto indices_sorted = cuda::get_next_ptr(count, workspace, workspace_size); - auto scores = cuda::get_next_ptr(count, workspace, workspace_size); - auto scores_sorted = cuda::get_next_ptr(count, workspace, workspace_size); - - for (int batch = 0; batch < batch_size; batch++) { - auto in_scores = static_cast(inputs[0]) + batch * count; - auto in_boxes = static_cast(inputs[1]) + batch * count; - auto in_classes = static_cast(inputs[2]) + batch * count; - - auto out_scores = static_cast(outputs[0]) + batch * detections_per_im; - auto out_boxes = static_cast(outputs[1]) + batch * detections_per_im; - auto out_classes = static_cast(outputs[2]) + batch * detections_per_im; - - // Discard null scores - thrust::transform( - on_stream, in_scores, in_scores + count, flags, thrust::placeholders::_1 > 0.0f); - - int * num_selected = reinterpret_cast(indices_sorted); - cub::DeviceSelect::Flagged( - workspace, workspace_size, cub::CountingInputIterator(0), flags, indices, num_selected, - count, stream); - cudaStreamSynchronize(stream); - int num_detections = *thrust::device_pointer_cast(num_selected); - - // Sort scores and corresponding indices - thrust::gather(on_stream, indices, indices + num_detections, in_scores, scores); - cub::DeviceRadixSort::SortPairsDescending( - 
workspace, workspace_size, scores, scores_sorted, indices, indices_sorted, num_detections, 0, - sizeof(*scores) * 8, stream); - - // Launch actual NMS kernel - 1 block with each thread handling n detections - const int max_threads = 1024; - int num_per_thread = ceil((float)num_detections / max_threads); - nms_kernel<<<1, max_threads, 0, stream>>>( - num_per_thread, nms_thresh, num_detections, indices_sorted, scores_sorted, in_classes, - in_boxes); - - // Re-sort with updated scores - cub::DeviceRadixSort::SortPairsDescending( - workspace, workspace_size, scores_sorted, scores, indices_sorted, indices, num_detections, 0, - sizeof(*scores) * 8, stream); - - // Gather filtered scores, boxes, classes - num_detections = min(detections_per_im, num_detections); - cudaMemcpyAsync( - out_scores, scores, num_detections * sizeof *scores, cudaMemcpyDeviceToDevice, stream); - if (num_detections < detections_per_im) { - thrust::fill_n(on_stream, out_scores + num_detections, detections_per_im - num_detections, 0); - } - thrust::gather(on_stream, indices, indices + num_detections, in_boxes, out_boxes); - thrust::gather(on_stream, indices, indices + num_detections, in_classes, out_classes); - } - - return 0; -} - -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/plugins/nms_plugin.cpp b/perception/tensorrt_yolo/lib/src/plugins/nms_plugin.cpp deleted file mode 100644 index b486595a2cb7b..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/nms_plugin.cpp +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#include -#include - -#include -#include - -#include -#include -#include - -using nvinfer1::DataType; -using nvinfer1::DimsExprs; -using nvinfer1::DynamicPluginTensorDesc; -using nvinfer1::IExprBuilder; -using nvinfer1::IPluginV2DynamicExt; -using nvinfer1::PluginFieldCollection; -using nvinfer1::PluginFormat; -using nvinfer1::PluginTensorDesc; - -namespace -{ -const char * NMS_PLUGIN_VERSION{"1"}; -const char * NMS_PLUGIN_NAME{"YOLO_NMS_TRT"}; -const char * NMS_PLUGIN_NAMESPACE{""}; - -template -void write(char *& buffer, const T & val) -{ - std::memcpy(buffer, &val, sizeof(T)); - buffer += sizeof(T); -} - -template -void read(const char *& buffer, T & val) -{ - std::memcpy(&val, buffer, sizeof(T)); - buffer += sizeof(T); -} - -} // namespace - -namespace yolo -{ -NMSPlugin::NMSPlugin(float nms_thresh, int detections_per_im) -: nms_thresh_(nms_thresh), detections_per_im_(detections_per_im) -{ - assert(nms_thresh > 0); - assert(detections_per_im > 0); -} - -NMSPlugin::NMSPlugin(float nms_thresh, int detections_per_im, size_t count) -: nms_thresh_(nms_thresh), detections_per_im_(detections_per_im), count_(count) -{ - assert(nms_thresh > 0); - assert(detections_per_im > 0); - assert(count > 0); -} - -NMSPlugin::NMSPlugin(void const * data, size_t length) -{ - (void)length; - - const char * d = static_cast(data); - read(d, nms_thresh_); - read(d, detections_per_im_); - read(d, count_); -} - -const char * NMSPlugin::getPluginType() const noexcept -{ - return NMS_PLUGIN_NAME; -} - -const char * NMSPlugin::getPluginVersion() const noexcept -{ - return NMS_PLUGIN_VERSION; -} - -int NMSPlugin::getNbOutputs() const noexcept -{ - return 3; -} - -int NMSPlugin::initialize() noexcept -{ - return 0; -} - -void NMSPlugin::terminate() noexcept -{ -} - -size_t NMSPlugin::getSerializationSize() const noexcept -{ - return sizeof(nms_thresh_) + sizeof(detections_per_im_) + sizeof(count_); -} - -void NMSPlugin::serialize(void * buffer) const noexcept -{ - char * d = 
static_cast(buffer); - write(d, nms_thresh_); - write(d, detections_per_im_); - write(d, count_); -} - -void NMSPlugin::destroy() noexcept -{ - delete this; -} - -void NMSPlugin::setPluginNamespace(const char * N) noexcept -{ - (void)N; -} - -const char * NMSPlugin::getPluginNamespace() const noexcept -{ - return NMS_PLUGIN_NAMESPACE; -} - -// IPluginV2Ext Methods - -DataType NMSPlugin::getOutputDataType( - int index, const DataType * inputTypes, int nbInputs) const noexcept -{ - (void)index; - (void)inputTypes; - (void)nbInputs; - - assert(index < 3); - return DataType::kFLOAT; -} - -// IPluginV2DynamicExt Methods - -IPluginV2DynamicExt * NMSPlugin::clone() const noexcept -{ - return new NMSPlugin(nms_thresh_, detections_per_im_, count_); -} - -DimsExprs NMSPlugin::getOutputDimensions( - int outputIndex, const DimsExprs * inputs, int nbInputs, IExprBuilder & exprBuilder) noexcept -{ - (void)nbInputs; - - DimsExprs output(inputs[0]); - output.d[1] = exprBuilder.constant(detections_per_im_ * (outputIndex == 1 ? 
4 : 1)); - output.d[2] = exprBuilder.constant(1); - output.d[3] = exprBuilder.constant(1); - return output; -} - -bool NMSPlugin::supportsFormatCombination( - int pos, const PluginTensorDesc * inOut, int nbInputs, int nbOutputs) noexcept -{ - (void)nbInputs; - (void)nbOutputs; - - assert(nbInputs == 3); - assert(nbOutputs == 3); - assert(pos < 6); - return inOut[pos].type == DataType::kFLOAT && inOut[pos].format == PluginFormat::kLINEAR; -} - -void NMSPlugin::configurePlugin( - const DynamicPluginTensorDesc * in, int nbInputs, const DynamicPluginTensorDesc * out, - int nbOutputs) noexcept -{ - (void)nbInputs; - (void)nbOutputs; - (void)out; - (void)nbOutputs; - - assert(nbInputs == 3); - assert(in[0].desc.dims.d[1] == in[2].desc.dims.d[1]); - assert(in[1].desc.dims.d[1] == in[2].desc.dims.d[1] * 4); - count_ = in[0].desc.dims.d[1]; -} - -size_t NMSPlugin::getWorkspaceSize( - const PluginTensorDesc * inputs, int nbInputs, const PluginTensorDesc * outputs, - int nbOutputs) const noexcept -{ - (void)nbInputs; - (void)outputs; - (void)nbOutputs; - - if (size < 0) { - size = nms( - inputs->dims.d[0], nullptr, nullptr, count_, detections_per_im_, nms_thresh_, nullptr, 0, - nullptr); - } - return size; -} - -int NMSPlugin::enqueue( - const PluginTensorDesc * inputDesc, const PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept -{ - return nms( - inputDesc->dims.d[0], inputs, outputs, count_, detections_per_im_, nms_thresh_, workspace, - getWorkspaceSize(inputDesc, 3, outputDesc, 3), stream); -} - -NMSPluginCreator::NMSPluginCreator() -{ -} - -const char * NMSPluginCreator::getPluginName() const noexcept -{ - return NMS_PLUGIN_NAME; -} - -const char * NMSPluginCreator::getPluginVersion() const noexcept -{ - return NMS_PLUGIN_VERSION; -} - -const char * NMSPluginCreator::getPluginNamespace() const noexcept -{ - return NMS_PLUGIN_NAMESPACE; -} - -void NMSPluginCreator::setPluginNamespace(const 
char * N) noexcept -{ - (void)N; -} -const PluginFieldCollection * NMSPluginCreator::getFieldNames() noexcept -{ - return nullptr; -} -IPluginV2DynamicExt * NMSPluginCreator::createPlugin( - const char * name, const PluginFieldCollection * fc) noexcept -{ - (void)name; - (void)fc; - return nullptr; -} - -IPluginV2DynamicExt * NMSPluginCreator::deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept -{ - (void)name; - - return new NMSPlugin(serialData, serialLength); -} - -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/plugins/yolo_layer.cu b/perception/tensorrt_yolo/lib/src/plugins/yolo_layer.cu deleted file mode 100644 index 0da2c342faaa9..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/yolo_layer.cu +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - */ - -#include -#include - -#include -#include - -#include - -namespace yolo -{ -inline __device__ float sigmoid(float x) -{ - return 1.0f / (1.0f + __expf(-x)); -} - -inline __device__ float scaleSigmoid(float x, float scale) -{ - return scale * sigmoid(x) - (scale - 1.0f) * 0.5f; -} - -template -__global__ void yoloLayerKernel( - const float * input, float * out_scores, float4 * out_boxes, float * out_classes, int grid_width, - int grid_height, int num_classes, int num_anchors, const float * anchors, int input_width, - int input_height, float scale_x_y, float score_thresh, bool use_darknet_layer) -{ - int idx = threadIdx.x + TPB * blockIdx.x; - int total_grids = grid_width * grid_height; - if (idx >= total_grids * num_anchors) return; - auto out_score = (out_scores) + idx; - auto out_box = (out_boxes) + idx; - auto out_class = (out_classes) + idx; - - int anchor_idx = idx / total_grids; - idx = idx - total_grids * anchor_idx; - int info_len = 5 + num_classes; - auto cur_input = static_cast(input) + anchor_idx * (info_len * total_grids); - - int class_id; - float max_cls_logit = -CUDART_INF_F; // minus infinity - for (int i = 5; i < info_len; ++i) { - float l = cur_input[idx + i * total_grids]; - if (l > max_cls_logit) { - max_cls_logit = l; - class_id = i - 5; - } - } - float max_cls_prob = sigmoid(max_cls_logit); - float objectness = sigmoid(cur_input[idx + 4 * total_grids]); - - int row = idx / grid_width; - int col = idx % grid_width; - float x = 0, y = 0, w = 0, h = 0; - - 
if (use_darknet_layer) { - x = (col + scaleSigmoid(cur_input[idx + 0 * total_grids], scale_x_y)) / grid_width; // [0, 1] - y = (row + scaleSigmoid(cur_input[idx + 1 * total_grids], scale_x_y)) / grid_height; // [0, 1] - w = __expf(cur_input[idx + 2 * total_grids]) * anchors[2 * anchor_idx] / input_width; // [0, 1] - h = __expf(cur_input[idx + 3 * total_grids]) * anchors[2 * anchor_idx + 1] / - input_height; // [0, 1] - } else { - x = (col + sigmoid(cur_input[idx + 0 * total_grids]) * 2 - 0.5) / grid_width; // [0, 1] - y = (row + sigmoid(cur_input[idx + 1 * total_grids]) * 2 - 0.5) / grid_height; // [0, 1] - w = (sigmoid(cur_input[idx + 2 * total_grids]) * 2) * - (sigmoid(cur_input[idx + 2 * total_grids]) * 2) * anchors[2 * anchor_idx] / - input_width; // [0, 1] - h = (sigmoid(cur_input[idx + 3 * total_grids]) * 2) * - (sigmoid(cur_input[idx + 3 * total_grids]) * 2) * anchors[2 * anchor_idx + 1] / - input_height; // [0, 1] - } - x -= w / 2; // shift from center to top-left - y -= h / 2; - *out_box = make_float4(x, y, w, h); - *out_class = class_id; - *out_score = objectness < score_thresh ? 
0.0 : max_cls_prob * objectness; -} - -int yoloLayer( - int batch_size, const void * const * inputs, void * const * outputs, int grid_width, - int grid_height, int num_classes, int num_anchors, const std::vector & anchors, - int input_width, int input_height, float scale_x_y, float score_thresh, bool use_darknet_layer, - void * workspace, size_t workspace_size, cudaStream_t stream) -{ - if (!workspace || !workspace_size) { - workspace_size = cuda::get_size_aligned(anchors.size()); - return workspace_size; - } - - auto anchors_d = cuda::get_next_ptr(anchors.size(), workspace, workspace_size); - cudaMemcpyAsync( - anchors_d, anchors.data(), anchors.size() * sizeof *anchors_d, cudaMemcpyHostToDevice, stream); - - int num_elements = num_anchors * grid_width * grid_height; - constexpr int block_size = 256; - const int grid_size = (num_elements + block_size - 1) / block_size; - for (int batch = 0; batch < batch_size; ++batch) { - auto input = static_cast(inputs[0]) + - batch * num_anchors * (num_classes + 5) * grid_width * grid_height; - auto out_scores = static_cast(outputs[0]) + batch * num_elements; - auto out_boxes = static_cast(outputs[1]) + batch * num_elements; - auto out_classes = static_cast(outputs[2]) + batch * num_elements; - yoloLayerKernel<<>>( - input, out_scores, out_boxes, out_classes, grid_width, grid_height, num_classes, num_anchors, - anchors_d, input_width, input_height, scale_x_y, score_thresh, use_darknet_layer); - } - return 0; -} - -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/plugins/yolo_layer_plugin.cpp b/perception/tensorrt_yolo/lib/src/plugins/yolo_layer_plugin.cpp deleted file mode 100644 index dc12921dc1e96..0000000000000 --- a/perception/tensorrt_yolo/lib/src/plugins/yolo_layer_plugin.cpp +++ /dev/null @@ -1,339 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * MIT License - - * Copyright (c) 2019-2020 Wang Xinyu - - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
- * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#include -#include - -#include -#include - -#include -#include -#include -#include - -using nvinfer1::DataType; -using nvinfer1::DimsExprs; -using nvinfer1::DynamicPluginTensorDesc; -using nvinfer1::IExprBuilder; -using nvinfer1::IPluginV2DynamicExt; -using nvinfer1::PluginFieldCollection; -using nvinfer1::PluginFormat; -using nvinfer1::PluginTensorDesc; - -namespace -{ -const char * YOLO_LAYER_PLUGIN_VERSION{"1"}; -const char * YOLO_LAYER_PLUGIN_NAME{"YOLO_Layer_TRT"}; -const char * YOLO_LAYER_PLUGIN_NAMESPACE{""}; - -template -void write(char *& buffer, const T & val) -{ - std::memcpy(buffer, &val, sizeof(T)); - buffer += sizeof(T); -} - -template -void read(const char *& buffer, T & val) -{ - std::memcpy(&val, buffer, sizeof(T)); - buffer += sizeof(T); -} -} // namespace - -namespace yolo -{ -YoloLayerPlugin::YoloLayerPlugin( - int width, int height, int num_anchors, std::vector & anchors, float scale_x_y, - float score_thresh, bool use_darknet_layer) -: width_(width), - height_(height), - num_anchors_(num_anchors), - anchors_(anchors), - scale_x_y_(scale_x_y), - score_thresh_(score_thresh), - use_darknet_layer_(use_darknet_layer) -{ -} - -// create the plugin at runtime from a byte stream -YoloLayerPlugin::YoloLayerPlugin(const void * data, size_t length) -{ - (void)length; - - const char * d = static_cast(data); - read(d, width_); - read(d, height_); - read(d, num_anchors_); - int anchor_size = num_anchors_ * 2; - while (anchor_size--) { - float val; - read(d, val); - anchors_.push_back(val); - } - read(d, scale_x_y_); - read(d, score_thresh_); - read(d, use_darknet_layer_); -} -// IPluginV2 Methods - -const char * YoloLayerPlugin::getPluginType() const noexcept -{ - return YOLO_LAYER_PLUGIN_NAME; -} - -const char * YoloLayerPlugin::getPluginVersion() const noexcept -{ - return YOLO_LAYER_PLUGIN_VERSION; -} - -int YoloLayerPlugin::getNbOutputs() const noexcept -{ - return 3; -} - -int YoloLayerPlugin::initialize() noexcept -{ - return 0; -} - -void 
YoloLayerPlugin::terminate() noexcept -{ -} - -size_t YoloLayerPlugin::getSerializationSize() const noexcept -{ - return sizeof(width_) + sizeof(height_) + sizeof(num_anchors_) + - sizeof(float) * num_anchors_ * 2 + sizeof(scale_x_y_) + sizeof(score_thresh_) + - sizeof(use_darknet_layer_); -} - -void YoloLayerPlugin::serialize(void * buffer) const noexcept -{ - char * d = reinterpret_cast(buffer); - write(d, width_); - write(d, height_); - write(d, num_anchors_); - for (int i = 0; i < num_anchors_ * 2; ++i) { - write(d, anchors_[i]); - } - write(d, scale_x_y_); - write(d, score_thresh_); - write(d, use_darknet_layer_); -} - -void YoloLayerPlugin::destroy() noexcept -{ - delete this; -} - -void YoloLayerPlugin::setPluginNamespace(const char * N) noexcept -{ - (void)N; -} - -const char * YoloLayerPlugin::getPluginNamespace() const noexcept -{ - return YOLO_LAYER_PLUGIN_NAMESPACE; -} - -// IPluginV2Ext Methods - -DataType YoloLayerPlugin::getOutputDataType( - int index, const DataType * inputTypes, int nbInputs) const noexcept -{ - (void)index; - (void)inputTypes; - (void)nbInputs; - - return DataType::kFLOAT; -} - -// IPluginV2DynamicExt Methods - -IPluginV2DynamicExt * YoloLayerPlugin::clone() const noexcept -{ - return new YoloLayerPlugin(*this); -} - -DimsExprs YoloLayerPlugin::getOutputDimensions( - int outputIndex, const DimsExprs * inputs, int nbInputs, IExprBuilder & exprBuilder) noexcept -{ - (void)nbInputs; - - DimsExprs ret = inputs[0]; - ret.nbDims = 3; - const auto total_count = - ret.d[2]->getConstantValue() * ret.d[3]->getConstantValue() * num_anchors_; - ret.d[1] = exprBuilder.constant(total_count * (outputIndex == 1 ? 
4 : 1)); - ret.d[2] = exprBuilder.constant(1); - ret.d[3] = exprBuilder.constant(1); - return ret; -} - -bool YoloLayerPlugin::supportsFormatCombination( - int pos, const PluginTensorDesc * inOut, int nbInputs, int nbOutputs) noexcept -{ - (void)nbInputs; - (void)nbOutputs; - - assert(nbInputs == 1); - assert(nbOutputs == 3); - assert(pos < 4); - return inOut[pos].type == DataType::kFLOAT && inOut[pos].format == PluginFormat::kLINEAR; -} - -void YoloLayerPlugin::configurePlugin( - const DynamicPluginTensorDesc * in, int nbInput, const DynamicPluginTensorDesc * out, - int nbOutput) noexcept -{ - (void)in; - (void)nbInput; - (void)out; - (void)nbOutput; - - assert(nbInput == 1); - assert(nbOutput == 3); -} - -size_t YoloLayerPlugin::getWorkspaceSize( - const PluginTensorDesc * inputs, int nbInputs, const PluginTensorDesc * outputs, - int nbOutputs) const noexcept -{ - (void)nbInputs; - (void)outputs; - (void)nbOutputs; - - if (size < 0) { - const int batch_size = inputs[0].dims.d[0]; - const int grid_width = inputs[0].dims.d[2]; - const int grid_height = inputs[0].dims.d[3]; - const int num_classes = inputs[0].dims.d[1] / num_anchors_ - 5; - size = yoloLayer( - batch_size, nullptr, nullptr, grid_width, grid_height, num_classes, num_anchors_, anchors_, - width_, height_, scale_x_y_, score_thresh_, use_darknet_layer_, nullptr, 0, nullptr); - } - return size; -} - -int YoloLayerPlugin::enqueue( - const PluginTensorDesc * inputDesc, const PluginTensorDesc * outputDesc, - const void * const * inputs, void * const * outputs, void * workspace, - cudaStream_t stream) noexcept -{ - const int batch_size = inputDesc[0].dims.d[0]; - const int grid_width = inputDesc[0].dims.d[2]; - const int grid_height = inputDesc[0].dims.d[3]; - const int num_classes = inputDesc[0].dims.d[1] / num_anchors_ - 5; - - int status = -1; - status = yoloLayer( - batch_size, inputs, outputs, grid_width, grid_height, num_classes, num_anchors_, anchors_, - width_, height_, scale_x_y_, score_thresh_, 
use_darknet_layer_, workspace, - getWorkspaceSize(inputDesc, 1, outputDesc, 3), stream); - return status; -} - -YoloLayerPluginCreator::YoloLayerPluginCreator() -{ -} - -const char * YoloLayerPluginCreator::getPluginName() const noexcept -{ - return YOLO_LAYER_PLUGIN_NAME; -} - -const char * YoloLayerPluginCreator::getPluginVersion() const noexcept -{ - return YOLO_LAYER_PLUGIN_VERSION; -} - -const char * YoloLayerPluginCreator::getPluginNamespace() const noexcept -{ - return YOLO_LAYER_PLUGIN_NAMESPACE; -} - -void YoloLayerPluginCreator::setPluginNamespace(const char * N) noexcept -{ - (void)N; -} -const PluginFieldCollection * YoloLayerPluginCreator::getFieldNames() noexcept -{ - return nullptr; -} - -IPluginV2DynamicExt * YoloLayerPluginCreator::createPlugin( - const char * name, const PluginFieldCollection * fc) noexcept -{ - (void)name; - (void)fc; - - return nullptr; -} - -IPluginV2DynamicExt * YoloLayerPluginCreator::deserializePlugin( - const char * name, const void * serialData, size_t serialLength) noexcept -{ - (void)name; - - return new YoloLayerPlugin(serialData, serialLength); -} -} // namespace yolo diff --git a/perception/tensorrt_yolo/lib/src/trt_yolo.cpp b/perception/tensorrt_yolo/lib/src/trt_yolo.cpp deleted file mode 100644 index c8793aa4c8512..0000000000000 --- a/perception/tensorrt_yolo/lib/src/trt_yolo.cpp +++ /dev/null @@ -1,346 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -/* - * Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. 
- */ - -#include -#include -#include -#include -#include -#include - -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -namespace yolo -{ -void Net::load(const std::string & path) -{ - std::ifstream file(path, std::ios::in | std::ios::binary); - file.seekg(0, file.end); - size_t size = file.tellg(); - file.seekg(0, file.beg); - - char * buffer = new char[size]; - file.read(buffer, size); - file.close(); - if (runtime_) { - engine_ = unique_ptr(runtime_->deserializeCudaEngine(buffer, size)); - } - delete[] buffer; -} - -bool Net::prepare() -{ - if (!engine_) { - return false; - } - context_ = unique_ptr(engine_->createExecutionContext()); - if (!context_) { - return false; - } - input_d_ = cuda::make_unique(getMaxBatchSize() * getInputSize()); - out_scores_d_ = cuda::make_unique(getMaxBatchSize() * getMaxDetections()); - out_boxes_d_ = cuda::make_unique(getMaxBatchSize() * getMaxDetections() * 4); - out_classes_d_ = cuda::make_unique(getMaxBatchSize() * getMaxDetections()); - cudaStreamCreate(&stream_); - return true; -} - -std::vector Net::preprocess( - const cv::Mat & in_img, const int c, const int w, const int h) const -{ - cv::Mat rgb; - cv::cvtColor(in_img, rgb, cv::COLOR_BGR2RGB); - - cv::resize(rgb, rgb, cv::Size(w, h)); - - cv::Mat img_float; - rgb.convertTo(img_float, CV_32FC3, 1 / 255.0); - - // HWC TO CHW - std::vector input_channels(c); - cv::split(img_float, input_channels); - - std::vector result(h * w * c); - auto data = result.data(); - int channel_length = h * w; - for (int i = 0; i < c; ++i) { - memcpy(data, input_channels[i].data, channel_length * sizeof(float)); - data += channel_length; - } - - return result; -} - -Net::Net(const std::string & path, bool verbose) : logger_(verbose) -{ - runtime_ = unique_ptr(nvinfer1::createInferRuntime(logger_)); - load(path); - if (!prepare()) { - std::cout << "Fail to prepare engine" << std::endl; - return; - } -} - -Net::~Net() -{ - if (stream_) { - 
cudaStreamSynchronize(stream_); - cudaStreamDestroy(stream_); - } -} - -Net::Net( - const std::string & onnx_file_path, const std::string & precision, const int max_batch_size, - const Config & yolo_config, const std::vector & calibration_images, - const std::string & calibration_table, bool verbose, size_t workspace_size) -: logger_(verbose) -{ - runtime_ = unique_ptr(nvinfer1::createInferRuntime(logger_)); - if (!runtime_) { - std::cout << "Fail to create runtime" << std::endl; - return; - } - bool fp16 = precision.compare("FP16") == 0; - bool int8 = precision.compare("INT8") == 0; - - // Create builder - auto builder = unique_ptr(nvinfer1::createInferBuilder(logger_)); - if (!builder) { - std::cout << "Fail to create builder" << std::endl; - return; - } - auto config = unique_ptr(builder->createBuilderConfig()); - if (!config) { - std::cout << "Fail to create builder config" << std::endl; - return; - } - // Allow use of FP16 layers when running in INT8 - if (fp16 || int8) { - config->setFlag(nvinfer1::BuilderFlag::kFP16); - } -#if (NV_TENSORRT_MAJOR * 1000) + (NV_TENSORRT_MINOR * 100) + NV_TENSOR_PATCH >= 8400 - config->setMemoryPoolLimit(nvinfer1::MemoryPoolType::kWORKSPACE, workspace_size); -#else - config->setMaxWorkspaceSize(workspace_size); -#endif - - // Parse ONNX FCN - tensorrt_common::LOG_INFO(logger_) << "Building " << precision << " core model..." << std::endl; - const auto flag = - 1U << static_cast(nvinfer1::NetworkDefinitionCreationFlag::kEXPLICIT_BATCH); - auto network = unique_ptr(builder->createNetworkV2(flag)); - if (!network) { - tensorrt_common::LOG_ERROR(logger_) << "Fail to create network" << std::endl; - return; - } - auto parser = unique_ptr(nvonnxparser::createParser(*network, logger_)); - if (!parser) { - tensorrt_common::LOG_ERROR(logger_) << "Fail to create parser" << std::endl; - return; - } - - auto log_thread = logger_.log_throttle( - nvinfer1::ILogger::Severity::kINFO, - "Applying optimizations and building TRT CUDA engine. 
Please wait for a few minutes...", 5); - parser->parseFromFile( - onnx_file_path.c_str(), static_cast(nvinfer1::ILogger::Severity::kERROR)); - std::vector scores, boxes, classes; - const auto input = network->getInput(0); - const auto num_outputs = network->getNbOutputs(); - const auto input_dims = input->getDimensions(); - const auto input_channel = input_dims.d[1]; - const auto input_height = input_dims.d[2]; - const auto input_width = input_dims.d[3]; - for (int i = 0; i < num_outputs; ++i) { - auto output = network->getOutput(i); - std::vector anchor( - yolo_config.anchors.begin() + i * yolo_config.num_anchors * 2, - yolo_config.anchors.begin() + (i + 1) * yolo_config.num_anchors * 2); - auto yoloLayerPlugin = yolo::YoloLayerPlugin( - input_width, input_height, 3, anchor, yolo_config.scale_x_y[i], yolo_config.score_thresh, - yolo_config.use_darknet_layer); - std::vector inputs = {output}; - auto layer = network->addPluginV2(inputs.data(), inputs.size(), yoloLayerPlugin); - scores.push_back(layer->getOutput(0)); - boxes.push_back(layer->getOutput(1)); - classes.push_back(layer->getOutput(2)); - } - - // Cleanup outputs - for (int i = 0; i < num_outputs; i++) { - auto output = network->getOutput(0); - network->unmarkOutput(*output); - } - - // Concat tensors from each feature map - std::vector concat; - for (auto tensors : {scores, boxes, classes}) { - auto layer = network->addConcatenation(tensors.data(), tensors.size()); - layer->setAxis(1); - auto output = layer->getOutput(0); - concat.push_back(output); - } - - // Add NMS plugin - auto nmsPlugin = yolo::NMSPlugin(yolo_config.iou_thresh, yolo_config.detections_per_im); - auto layer = network->addPluginV2(concat.data(), concat.size(), nmsPlugin); - for (int i = 0; i < layer->getNbOutputs(); i++) { - auto output = layer->getOutput(i); - network->markOutput(*output); - } - - // create profile - auto profile = builder->createOptimizationProfile(); - profile->setDimensions( - network->getInput(0)->getName(), 
nvinfer1::OptProfileSelector::kMIN, - nvinfer1::Dims4{max_batch_size, input_channel, input_height, input_width}); - profile->setDimensions( - network->getInput(0)->getName(), nvinfer1::OptProfileSelector::kOPT, - nvinfer1::Dims4{max_batch_size, input_channel, input_height, input_width}); - profile->setDimensions( - network->getInput(0)->getName(), nvinfer1::OptProfileSelector::kMAX, - nvinfer1::Dims4{max_batch_size, input_channel, input_height, input_width}); - config->addOptimizationProfile(profile); - - std::unique_ptr calib{nullptr}; - if (int8) { - if (calibration_images.size() >= static_cast(max_batch_size)) { - config->setFlag(nvinfer1::BuilderFlag::kINT8); - yolo::ImageStream stream(max_batch_size, input_dims, calibration_images); - calib = std::make_unique(stream, calibration_table); - config->setInt8Calibrator(calib.get()); - } else { - tensorrt_common::LOG_WARN(logger_) - << "Fail to find enough images for INT8 calibration. Build FP mode..." << std::endl; - } - } - - // Build engine - plan_ = unique_ptr(builder->buildSerializedNetwork(*network, *config)); - if (!plan_) { - tensorrt_common::LOG_ERROR(logger_) << "Fail to create serialized network" << std::endl; - logger_.stop_throttle(log_thread); - return; - } - engine_ = unique_ptr( - runtime_->deserializeCudaEngine(plan_->data(), plan_->size())); - if (!prepare()) { - tensorrt_common::LOG_ERROR(logger_) << "Fail to create engine" << std::endl; - logger_.stop_throttle(log_thread); - return; - } - logger_.stop_throttle(log_thread); -} - -void Net::save(const std::string & path) const -{ - tensorrt_common::LOG_INFO(logger_) << "Writing to " << path << "..." 
<< std::endl; - std::ofstream file(path, std::ios::out | std::ios::binary); - file.write(reinterpret_cast(plan_->data()), plan_->size()); -} - -void Net::infer(std::vector & buffers, const int batch_size) -{ - if (!context_) { - throw std::runtime_error("Fail to create context"); - } - auto input_dims = engine_->getBindingDimensions(0); - context_->setBindingDimensions( - 0, nvinfer1::Dims4(batch_size, input_dims.d[1], input_dims.d[2], input_dims.d[3])); - context_->enqueueV2(buffers.data(), stream_, nullptr); - cudaStreamSynchronize(stream_); -} - -bool Net::detect(const cv::Mat & in_img, float * out_scores, float * out_boxes, float * out_classes) -{ - const auto input_dims = getInputDims(); - const auto input = preprocess(in_img, input_dims.at(0), input_dims.at(2), input_dims.at(1)); - CHECK_CUDA_ERROR( - cudaMemcpy(input_d_.get(), input.data(), input.size() * sizeof(float), cudaMemcpyHostToDevice)); - std::vector buffers = { - input_d_.get(), out_scores_d_.get(), out_boxes_d_.get(), out_classes_d_.get()}; - try { - infer(buffers, 1); - } catch (const std::runtime_error & e) { - return false; - } - CHECK_CUDA_ERROR(cudaMemcpyAsync( - out_scores, out_scores_d_.get(), sizeof(float) * getMaxDetections(), cudaMemcpyDeviceToHost, - stream_)); - CHECK_CUDA_ERROR(cudaMemcpyAsync( - out_boxes, out_boxes_d_.get(), sizeof(float) * 4 * getMaxDetections(), cudaMemcpyDeviceToHost, - stream_)); - CHECK_CUDA_ERROR(cudaMemcpyAsync( - out_classes, out_classes_d_.get(), sizeof(float) * getMaxDetections(), cudaMemcpyDeviceToHost, - stream_)); - cudaStreamSynchronize(stream_); - return true; -} - -std::vector Net::getInputDims() const -{ - auto dims = engine_->getBindingDimensions(0); - return {dims.d[1], dims.d[2], dims.d[3]}; -} - -int Net::getMaxBatchSize() const -{ - return engine_->getProfileDimensions(0, 0, nvinfer1::OptProfileSelector::kMAX).d[0]; -} - -int Net::getInputSize() const -{ - const auto input_dims = getInputDims(); - const auto input_size = - 
std::accumulate(std::begin(input_dims), std::end(input_dims), 1, std::multiplies()); - return input_size; -} - -int Net::getMaxDetections() const -{ - return engine_->getBindingDimensions(1).d[1]; -} - -} // namespace yolo diff --git a/perception/tensorrt_yolo/schema/tensortt_yolo.json b/perception/tensorrt_yolo/schema/tensortt_yolo.json deleted file mode 100644 index 0b4724078c994..0000000000000 --- a/perception/tensorrt_yolo/schema/tensortt_yolo.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Parameters for tensorrt_yolo", - "type": "object", - "definitions": { - "tensorrt_yolo": { - "type": "object", - "properties": { - "num_anchors": { - "type": "number", - "default": [ - 10.0, 13.0, 16.0, 30.0, 33.0, 23.0, 30.0, 61.0, 62.0, 45.0, 59.0, 119.0, 116.0, 90.0, - 156.0, 198.0, 373.0, 326.0 - ], - "description": "The anchors to create bounding box candidates." - }, - "scale_x_y": { - "type": "number", - "default": [1.0, 1.0, 1.0], - "description": "scale parameter to eliminate grid sensitivity." - }, - "score_thresh": { - "type": "number", - "default": 0.1, - "description": "If the objectness score is less than this value, the object is ignored in yolo layer." - }, - "iou_thresh": { - "type": "number", - "default": 0.45, - "description": "The iou threshold for NMS method." - }, - "detections_per_im": { - "type": "number", - "default": 100, - "description": " The maximum detection number for one frame." - }, - "use_darknet_layer": { - "type": "boolean", - "default": true, - "description": "The flag to use yolo layer in darknet." - }, - "ignore_thresh": { - "type": "number", - "default": 0.5, - "description": "If the output score is less than this value, this object is ignored." - }, - "onnx_file": { - "type": "string", - "description": "The onnx file name for yolo model." - }, - "engine_file": { - "type": "string", - "description": "The tensorrt engine file name for yolo model." 
- }, - "label_file": { - "type": "string", - "description": "The label file with label names for detected objects written on it." - }, - "calib_image_directory": { - "type": "string", - "description": "The directory name including calibration images for int8 inference." - }, - "calib_cache_file": { - "type": "string", - "description": "The calibration cache file for int8 inference." - }, - "mode": { - "type": "string", - "default": "FP32", - "description": "The inference mode: FP32, FP16, INT8." - }, - "gpu_id": { - "type": "number", - "default": 0, - "description": "GPU device ID that runs the model." - } - }, - "required": [ - "num_anchors", - "scale_x_y", - "score_thresh", - "iou_thresh", - "detections_per_im", - "use_darknet_layer", - "ignore_thresh", - "onnx_file", - "engine_file", - "label_file", - "calib_image_directory", - "calib_cache_file", - "mode", - "gpu_id" - ] - } - }, - "properties": { - "/**": { - "type": "object", - "properties": { - "ros__parameters": { - "$ref": "#/definitions/tensorrt_yolo" - } - }, - "required": ["ros__parameters"] - } - }, - "required": ["/**"] -} diff --git a/perception/tensorrt_yolo/src/nodelet.cpp b/perception/tensorrt_yolo/src/nodelet.cpp deleted file mode 100644 index fdcd8bf12db72..0000000000000 --- a/perception/tensorrt_yolo/src/nodelet.cpp +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "tensorrt_yolo/nodelet.hpp" - -#include - -#include - -#include -#include -#include -#include -#include - -namespace -{ -std::vector getFilePath(const std::string & input_dir) -{ - glob_t glob_buffer; - std::vector files; - glob((input_dir + "*").c_str(), 0, NULL, &glob_buffer); - for (size_t i = 0; i < glob_buffer.gl_pathc; i++) { - files.push_back(glob_buffer.gl_pathv[i]); - } - globfree(&glob_buffer); - return files; -} -} // namespace -namespace object_recognition -{ -TensorrtYoloNodelet::TensorrtYoloNodelet(const rclcpp::NodeOptions & options) -: Node("tensorrt_yolo", options) -{ - using std::placeholders::_1; - - std::string onnx_file = declare_parameter("onnx_file", ""); - std::string engine_file = declare_parameter("engine_file", ""); - std::string label_file = declare_parameter("label_file", ""); - std::string calib_image_directory = declare_parameter("calib_image_directory", ""); - std::string calib_cache_file = declare_parameter("calib_cache_file", ""); - std::string mode = declare_parameter("mode"); - gpu_device_id_ = declare_parameter("gpu_id"); - yolo_config_.num_anchors = declare_parameter("num_anchors"); - auto anchors = declare_parameter( - "anchors", std::vector{ - 10, 13, 16, 30, 33, 23, 30, 61, 62, 45, 59, 119, 116, 90, 156, 198, 373, 326}); - std::vector anchors_float(anchors.begin(), anchors.end()); - yolo_config_.anchors = anchors_float; - auto scale_x_y = declare_parameter("scale_x_y", std::vector{1.0, 1.0, 1.0}); - std::vector scale_x_y_float(scale_x_y.begin(), scale_x_y.end()); - yolo_config_.scale_x_y = scale_x_y_float; - yolo_config_.score_thresh = declare_parameter("score_thresh"); - yolo_config_.iou_thresh = declare_parameter("iou_thresh"); - yolo_config_.detections_per_im = declare_parameter("detections_per_im"); - yolo_config_.use_darknet_layer = declare_parameter("use_darknet_layer"); - yolo_config_.ignore_thresh = declare_parameter("ignore_thresh"); - - if (!yolo::set_cuda_device(gpu_device_id_)) { - 
RCLCPP_ERROR(this->get_logger(), "Given GPU not exist or suitable"); - } - - if (!readLabelFile(label_file, &labels_)) { - RCLCPP_ERROR(this->get_logger(), "Could not find label file"); - } - std::ifstream fs(engine_file); - const auto calibration_images = getFilePath(calib_image_directory); - if (fs.is_open()) { - RCLCPP_INFO(this->get_logger(), "Found %s", engine_file.c_str()); - net_ptr_.reset(new yolo::Net(engine_file, false)); - if (net_ptr_->getMaxBatchSize() != 1) { - RCLCPP_INFO( - this->get_logger(), "Max batch size %d should be 1. Rebuild engine from file", - net_ptr_->getMaxBatchSize()); - net_ptr_.reset( - new yolo::Net(onnx_file, mode, 1, yolo_config_, calibration_images, calib_cache_file)); - net_ptr_->save(engine_file); - } - } else { - RCLCPP_INFO( - this->get_logger(), "Could not find %s, try making TensorRT engine from onnx", - engine_file.c_str()); - net_ptr_.reset( - new yolo::Net(onnx_file, mode, 1, yolo_config_, calibration_images, calib_cache_file)); - net_ptr_->save(engine_file); - } - RCLCPP_INFO(this->get_logger(), "Inference engine prepared."); - - using std::chrono_literals::operator""ms; - timer_ = rclcpp::create_timer( - this, get_clock(), 100ms, std::bind(&TensorrtYoloNodelet::connectCb, this)); - - std::lock_guard lock(connect_mutex_); - - objects_pub_ = this->create_publisher( - "out/objects", 1); - image_pub_ = image_transport::create_publisher(this, "out/image"); - - out_scores_ = - std::make_unique(net_ptr_->getMaxBatchSize() * net_ptr_->getMaxDetections()); - out_boxes_ = - std::make_unique(net_ptr_->getMaxBatchSize() * net_ptr_->getMaxDetections() * 4); - out_classes_ = - std::make_unique(net_ptr_->getMaxBatchSize() * net_ptr_->getMaxDetections()); -} - -void TensorrtYoloNodelet::connectCb() -{ - using std::placeholders::_1; - std::lock_guard lock(connect_mutex_); - if (objects_pub_->get_subscription_count() == 0 && image_pub_.getNumSubscribers() == 0) { - image_sub_.shutdown(); - } else if (!image_sub_) { - image_sub_ = 
image_transport::create_subscription( - this, "in/image", std::bind(&TensorrtYoloNodelet::callback, this, _1), "raw", - rmw_qos_profile_sensor_data); - } -} - -void TensorrtYoloNodelet::callback(const sensor_msgs::msg::Image::ConstSharedPtr in_image_msg) -{ - using Label = autoware_auto_perception_msgs::msg::ObjectClassification; - - tier4_perception_msgs::msg::DetectedObjectsWithFeature out_objects; - - if (!yolo::set_cuda_device(gpu_device_id_)) { - RCLCPP_ERROR(this->get_logger(), "Given GPU not exist or suitable"); - return; - } - - cv_bridge::CvImagePtr in_image_ptr; - try { - in_image_ptr = cv_bridge::toCvCopy(in_image_msg, sensor_msgs::image_encodings::BGR8); - } catch (cv_bridge::Exception & e) { - RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); - return; - } - if (!net_ptr_->detect( - in_image_ptr->image, out_scores_.get(), out_boxes_.get(), out_classes_.get())) { - RCLCPP_WARN(this->get_logger(), "Fail to inference"); - return; - } - const auto width = in_image_ptr->image.cols; - const auto height = in_image_ptr->image.rows; - for (int i = 0; i < yolo_config_.detections_per_im; ++i) { - if (out_scores_[i] < yolo_config_.ignore_thresh) { - break; - } - tier4_perception_msgs::msg::DetectedObjectWithFeature object; - object.feature.roi.x_offset = out_boxes_[4 * i] * width; - object.feature.roi.y_offset = out_boxes_[4 * i + 1] * height; - object.feature.roi.width = out_boxes_[4 * i + 2] * width; - object.feature.roi.height = out_boxes_[4 * i + 3] * height; - object.object.classification.emplace_back(autoware_auto_perception_msgs::build