diff --git a/perception/autoware_image_projection_based_fusion/config/pointpainting.param.yaml b/perception/autoware_image_projection_based_fusion/config/pointpainting.param.yaml
index 9227e4fa9319a..5448dd48dc456 100644
--- a/perception/autoware_image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/autoware_image_projection_based_fusion/config/pointpainting.param.yaml
@@ -9,7 +9,6 @@
     post_process_params:
       # post-process params
       circle_nms_dist_threshold: 0.3
-      iou_nms_target_class_names: ["CAR"]
       iou_nms_search_distance_2d: 10.0
       iou_nms_threshold: 0.1
       score_threshold: 0.35
diff --git a/perception/autoware_image_projection_based_fusion/docs/pointpainting-fusion.md b/perception/autoware_image_projection_based_fusion/docs/pointpainting-fusion.md
index e04b0f48398cb..92f62443918c0 100644
--- a/perception/autoware_image_projection_based_fusion/docs/pointpainting-fusion.md
+++ b/perception/autoware_image_projection_based_fusion/docs/pointpainting-fusion.md
@@ -43,7 +43,6 @@ The lidar points are projected onto the output of an image-only 2d object detect
 | `trt_precision` | string | `fp16` | TensorRT inference precision: `fp32` or `fp16` |
 | `post_process_params.score_threshold` | double | `0.4` | detected objects with score less than threshold are ignored |
 | `post_process_params.yaw_norm_thresholds` | list[double] | [0.3, 0.3, 0.3, 0.3, 0.0] | An array of distance threshold values of norm of yaw [rad]. |
-| `post_process_params.iou_nms_target_class_names` | list[string] | ["CAR"] | An array of class names to be target in NMS. |
 | `post_process_params.iou_nms_search_distance_2d` | double | 10.0 | A maximum distance value to search the nearest objects. |
 | `post_process_params.iou_nms_threshold` | double | 0.1 | A threshold value of NMS using IoU score. |
 | `post_process_params.has_twist` | boolean | false | Indicates whether the model outputs twist value. |
diff --git a/perception/autoware_image_projection_based_fusion/schema/pointpainting.schema.json b/perception/autoware_image_projection_based_fusion/schema/pointpainting.schema.json
index 57d8bc500f3c6..ab52ac0fcdf2c 100644
--- a/perception/autoware_image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/autoware_image_projection_based_fusion/schema/pointpainting.schema.json
@@ -46,12 +46,6 @@
           "minimum": 0.0,
           "maximum": 1.0
         },
-        "iou_nms_target_class_names": {
-          "type": "array",
-          "description": "An array of class names to be target in NMS.",
-          "default": ["CAR"],
-          "uniqueItems": true
-        },
         "iou_nms_search_distance_2d": {
           "type": "number",
           "description": "A maximum distance value to search the nearest objects.",
diff --git a/perception/autoware_image_projection_based_fusion/src/pointpainting_fusion/node.cpp b/perception/autoware_image_projection_based_fusion/src/pointpainting_fusion/node.cpp
index 4e0f88e2134ac..d4601e26dda46 100644
--- a/perception/autoware_image_projection_based_fusion/src/pointpainting_fusion/node.cpp
+++ b/perception/autoware_image_projection_based_fusion/src/pointpainting_fusion/node.cpp
@@ -171,9 +171,6 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt
 
   {
     autoware::lidar_centerpoint::NMSParams p;
-    p.nms_type_ = autoware::lidar_centerpoint::NMS_TYPE::IoU_BEV;
-    p.target_class_names_ = this->declare_parameter<std::vector<std::string>>(
-      "post_process_params.iou_nms_target_class_names");
     p.search_distance_2d_ =
      this->declare_parameter<double>("post_process_params.iou_nms_search_distance_2d");
    p.iou_threshold_ = this->declare_parameter<double>("post_process_params.iou_nms_threshold");
diff --git a/perception/autoware_lidar_centerpoint/README.md b/perception/autoware_lidar_centerpoint/README.md
index 471712a637dec..06ded16d5fd40 100644
--- a/perception/autoware_lidar_centerpoint/README.md
+++ b/perception/autoware_lidar_centerpoint/README.md
@@ -56,7 +56,6 @@ Note that these parameters are associated with ONNX file, predefined during the
 | `trt_precision` | string | `fp16` | TensorRT inference precision: `fp32` or `fp16` |
 | `post_process_params.score_threshold` | double | `0.4` | detected objects with score less than threshold are ignored |
 | `post_process_params.yaw_norm_thresholds` | list[double] | [0.3, 0.3, 0.3, 0.3, 0.0] | An array of distance threshold values of norm of yaw [rad]. |
-| `post_process_params.iou_nms_target_class_names` | list[string] | - | target classes for IoU-based Non Maximum Suppression |
 | `post_process_params.iou_nms_search_distance_2d` | double | - | If two objects are farther than the value, NMS isn't applied. |
 | `post_process_params.iou_nms_threshold` | double | - | IoU threshold for the IoU-based Non Maximum Suppression |
 | `post_process_params.has_twist` | boolean | false | Indicates whether the model outputs twist value. |
@@ -267,7 +266,6 @@ point_cloud_range, point_feature_size, voxel_size, etc. according to the trainin
   encoder_in_feature_size: 9
   # post-process params
   circle_nms_dist_threshold: 0.5
-  iou_nms_target_class_names: ["CAR"]
   iou_nms_search_distance_2d: 10.0
   iou_nms_threshold: 0.1
   yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
diff --git a/perception/autoware_lidar_centerpoint/config/centerpoint.param.yaml b/perception/autoware_lidar_centerpoint/config/centerpoint.param.yaml
index 53d77e8d1c42c..d2f6d3bb3ab6c 100644
--- a/perception/autoware_lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/autoware_lidar_centerpoint/config/centerpoint.param.yaml
@@ -10,7 +10,6 @@
     post_process_params:
       # post-process params
      circle_nms_dist_threshold: 0.5
-      iou_nms_target_class_names: ["CAR"]
       iou_nms_search_distance_2d: 10.0
       iou_nms_threshold: 0.1
       score_threshold: 0.35
diff --git a/perception/autoware_lidar_centerpoint/config/centerpoint_sigma.param.yaml b/perception/autoware_lidar_centerpoint/config/centerpoint_sigma.param.yaml
index bd5fc5f3567a5..f4a5d59c3c06f 100644
--- a/perception/autoware_lidar_centerpoint/config/centerpoint_sigma.param.yaml
+++ b/perception/autoware_lidar_centerpoint/config/centerpoint_sigma.param.yaml
@@ -11,7 +11,6 @@
     post_process_params:
       # post-process params
       circle_nms_dist_threshold: 0.5
-      iou_nms_target_class_names: ["CAR"]
       iou_nms_search_distance_2d: 10.0
       iou_nms_threshold: 0.1
       yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
diff --git a/perception/autoware_lidar_centerpoint/config/centerpoint_tiny.param.yaml b/perception/autoware_lidar_centerpoint/config/centerpoint_tiny.param.yaml
index 53d77e8d1c42c..d2f6d3bb3ab6c 100644
--- a/perception/autoware_lidar_centerpoint/config/centerpoint_tiny.param.yaml
+++ b/perception/autoware_lidar_centerpoint/config/centerpoint_tiny.param.yaml
@@ -10,7 +10,6 @@
     post_process_params:
       # post-process params
       circle_nms_dist_threshold: 0.5
-      iou_nms_target_class_names: ["CAR"]
       iou_nms_search_distance_2d: 10.0
       iou_nms_threshold: 0.1
       score_threshold: 0.35
diff --git a/perception/autoware_lidar_centerpoint/include/autoware/lidar_centerpoint/postprocess/non_maximum_suppression.hpp b/perception/autoware_lidar_centerpoint/include/autoware/lidar_centerpoint/postprocess/non_maximum_suppression.hpp
index 57abd053af718..74bc5c2f57deb 100644
--- a/perception/autoware_lidar_centerpoint/include/autoware/lidar_centerpoint/postprocess/non_maximum_suppression.hpp
+++ b/perception/autoware_lidar_centerpoint/include/autoware/lidar_centerpoint/postprocess/non_maximum_suppression.hpp
@@ -28,36 +28,12 @@ namespace autoware::lidar_centerpoint
 {
 using autoware_perception_msgs::msg::DetectedObject;
 
-// TODO(yukke42): now only support IoU_BEV
-enum class NMS_TYPE {
-  IoU_BEV
-  // IoU_3D
-  // Distance_2D
-  // Distance_3D
-};
-
 struct NMSParams
 {
-  NMS_TYPE nms_type_{};
-  std::vector<std::string> target_class_names_{};
   double search_distance_2d_{};
   double iou_threshold_{};
-  // double distance_threshold_{};
 };
 
-std::vector<bool> classNamesToBooleanMask(const std::vector<std::string> & class_names)
-{
-  std::vector<bool> mask;
-  constexpr std::size_t num_object_classification = 8;
-  mask.resize(num_object_classification);
-  for (const auto & class_name : class_names) {
-    const auto semantic_type = getSemanticType(class_name);
-    mask.at(semantic_type) = true;
-  }
-
-  return mask;
-}
-
 class NonMaximumSuppression
 {
 public:
@@ -66,14 +42,13 @@ class NonMaximumSuppression
   std::vector<DetectedObject> apply(const std::vector<DetectedObject> &);
 
 private:
-  bool isTargetLabel(const std::uint8_t);
-
   bool isTargetPairObject(const DetectedObject &, const DetectedObject &);
 
   Eigen::MatrixXd generateIoUMatrix(const std::vector<DetectedObject> &);
 
   NMSParams params_{};
-  std::vector<bool> target_class_mask_{};
+
+  double search_distance_2d_sq_{};
 };
 
 }  // namespace autoware::lidar_centerpoint
diff --git a/perception/autoware_lidar_centerpoint/lib/postprocess/non_maximum_suppression.cpp b/perception/autoware_lidar_centerpoint/lib/postprocess/non_maximum_suppression.cpp
index 9764d20628f24..dbc6924f6fd25 100644
--- a/perception/autoware_lidar_centerpoint/lib/postprocess/non_maximum_suppression.cpp
+++ b/perception/autoware_lidar_centerpoint/lib/postprocess/non_maximum_suppression.cpp
@@ -22,6 +22,7 @@
 
 namespace autoware::lidar_centerpoint
 {
+using Label = autoware_perception_msgs::msg::ObjectClassification;
 
 void NonMaximumSuppression::setParameters(const NMSParams & params)
 {
@@ -29,15 +30,7 @@ void NonMaximumSuppression::setParameters(const NMSParams & params)
   assert(params.iou_threshold_ >= 0.0 && params.iou_threshold_ <= 1.0);
 
   params_ = params;
-  target_class_mask_ = classNamesToBooleanMask(params.target_class_names_);
-}
-
-bool NonMaximumSuppression::isTargetLabel(const uint8_t label)
-{
-  if (label >= target_class_mask_.size()) {
-    return false;
-  }
-  return target_class_mask_.at(label);
+  search_distance_2d_sq_ = params.search_distance_2d_ * params.search_distance_2d_;
 }
 
 bool NonMaximumSuppression::isTargetPairObject(
@@ -48,15 +41,15 @@ bool NonMaximumSuppression::isTargetPairObject(
   const auto label1 =
     autoware::object_recognition_utils::getHighestProbLabel(object1.classification);
   const auto label2 =
     autoware::object_recognition_utils::getHighestProbLabel(object2.classification);
 
-  if (isTargetLabel(label1) && isTargetLabel(label2)) {
-    return true;
+  // if labels are not the same, and one of them is pedestrian, do not suppress
+  if (label1 != label2 && (label1 == Label::PEDESTRIAN || label2 == Label::PEDESTRIAN)) {
+    return false;
   }
 
-  const auto search_sqr_dist_2d = params_.search_distance_2d_ * params_.search_distance_2d_;
   const auto sqr_dist_2d = autoware::universe_utils::calcSquaredDistance2d(
     autoware::object_recognition_utils::getPose(object1),
     autoware::object_recognition_utils::getPose(object2));
-  return sqr_dist_2d <= search_sqr_dist_2d;
+  return sqr_dist_2d <= search_distance_2d_sq_;
 }
 
 Eigen::MatrixXd NonMaximumSuppression::generateIoUMatrix(
@@ -73,14 +66,12 @@ Eigen::MatrixXd NonMaximumSuppression::generateIoUMatrix(
         continue;
       }
 
-      if (params_.nms_type_ == NMS_TYPE::IoU_BEV) {
-        const double iou = autoware::object_recognition_utils::get2dIoU(target_obj, source_obj);
-        triangular_matrix(target_i, source_i) = iou;
-        // NOTE: If the target object has any objects with iou > iou_threshold, it
-        // will be suppressed regardless of later results.
-        if (iou > params_.iou_threshold_) {
-          break;
-        }
+      const double iou = autoware::object_recognition_utils::get2dIoU(target_obj, source_obj);
+      triangular_matrix(target_i, source_i) = iou;
+      // NOTE: If the target object has any objects with iou > iou_threshold, it
+      // will be suppressed regardless of later results.
+      if (iou > params_.iou_threshold_) {
+        break;
       }
     }
   }
@@ -97,10 +88,9 @@ std::vector<DetectedObject> NonMaximumSuppression::apply(
   output_objects.reserve(input_objects.size());
   for (std::size_t i = 0; i < input_objects.size(); ++i) {
     const auto value = iou_matrix.row(i).maxCoeff();
-    if (params_.nms_type_ == NMS_TYPE::IoU_BEV) {
-      if (value <= params_.iou_threshold_) {
-        output_objects.emplace_back(input_objects.at(i));
-      }
+
+    if (value <= params_.iou_threshold_) {
+      output_objects.emplace_back(input_objects.at(i));
     }
   }
 
diff --git a/perception/autoware_lidar_centerpoint/schema/centerpoint.schemal.json b/perception/autoware_lidar_centerpoint/schema/centerpoint.schemal.json
index c3268f0e90d9b..f2c12b7588f9b 100644
--- a/perception/autoware_lidar_centerpoint/schema/centerpoint.schemal.json
+++ b/perception/autoware_lidar_centerpoint/schema/centerpoint.schemal.json
@@ -42,12 +42,6 @@
           "minimum": 0.0,
           "maximum": 1.0
         },
-        "iou_nms_target_class_names": {
-          "type": "array",
-          "description": "An array of class names to be target in NMS.",
-          "default": ["CAR"],
-          "uniqueItems": true
-        },
         "iou_nms_search_distance_2d": {
           "type": "number",
           "description": "A maximum distance value to search the nearest objects.",
diff --git a/perception/autoware_lidar_centerpoint/src/node.cpp b/perception/autoware_lidar_centerpoint/src/node.cpp
index 5b92e5c811a23..59acceeac54d6 100644
--- a/perception/autoware_lidar_centerpoint/src/node.cpp
+++ b/perception/autoware_lidar_centerpoint/src/node.cpp
@@ -80,9 +80,6 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
 
   {
     NMSParams p;
-    p.nms_type_ = NMS_TYPE::IoU_BEV;
-    p.target_class_names_ = this->declare_parameter<std::vector<std::string>>(
-      "post_process_params.iou_nms_target_class_names");
     p.search_distance_2d_ =
      this->declare_parameter<double>("post_process_params.iou_nms_search_distance_2d");
    p.iou_threshold_ = this->declare_parameter<double>("post_process_params.iou_nms_threshold");
diff --git a/perception/autoware_lidar_centerpoint/test/test_nms.cpp b/perception/autoware_lidar_centerpoint/test/test_nms.cpp
index fd9e78b0c6585..9a2217f7af0b4 100644
--- a/perception/autoware_lidar_centerpoint/test/test_nms.cpp
+++ b/perception/autoware_lidar_centerpoint/test/test_nms.cpp
@@ -24,8 +24,6 @@ TEST(NonMaximumSuppressionTest, Apply)
   autoware::lidar_centerpoint::NMSParams params;
   params.search_distance_2d_ = 1.0;
   params.iou_threshold_ = 0.2;
-  params.nms_type_ = autoware::lidar_centerpoint::NMS_TYPE::IoU_BEV;
-  params.target_class_names_ = {"CAR"};
   nms.setParameters(params);
 
   std::vector<DetectedObject> input_objects(4);
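For reference, a minimal usage sketch of the NMS interface as it looks after this change, mirroring the pattern in test_nms.cpp above. The NMSParams fields and the setParameters()/apply() calls come from the diff; the DetectedObject field accesses (kinematics, shape, classification) assume the standard autoware_perception_msgs message layout, and the two-box scenario, the chosen values, and the main() wrapper are illustrative only, not part of the patch.

// Illustrative sketch only: parameter names match the diff, object setup is assumed.
#include "autoware/lidar_centerpoint/postprocess/non_maximum_suppression.hpp"

#include <vector>

int main()
{
  using autoware_perception_msgs::msg::DetectedObject;
  using autoware_perception_msgs::msg::ObjectClassification;

  // Only a search radius and an IoU threshold remain after this change.
  autoware::lidar_centerpoint::NMSParams params;
  params.search_distance_2d_ = 10.0;
  params.iou_threshold_ = 0.1;

  autoware::lidar_centerpoint::NonMaximumSuppression nms;
  nms.setParameters(params);

  // Two heavily overlapping boxes of the same class: the second overlaps the
  // first with IoU above iou_threshold, so only the first survives suppression.
  std::vector<DetectedObject> input_objects(2);
  for (auto & object : input_objects) {
    ObjectClassification classification;
    classification.label = ObjectClassification::CAR;
    classification.probability = 1.0;
    object.classification.push_back(classification);
    object.shape.type = autoware_perception_msgs::msg::Shape::BOUNDING_BOX;
    object.shape.dimensions.x = 4.0;
    object.shape.dimensions.y = 2.0;
    object.shape.dimensions.z = 1.5;
  }
  input_objects.at(1).kinematics.pose_with_covariance.pose.position.x = 0.5;

  const auto output_objects = nms.apply(input_objects);
  return output_objects.size() == 1 ? 0 : 1;
}

With these values the second box is suppressed and apply() returns only the first object, while a pair of nearby objects with different labels where one is a pedestrian would be skipped entirely by the new isTargetPairObject() check.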