diff --git a/perception/lidar_transfusion/README.md b/perception/lidar_transfusion/README.md
index e30403141e8ad..6745cc1f7d219 100644
--- a/perception/lidar_transfusion/README.md
+++ b/perception/lidar_transfusion/README.md
@@ -81,9 +81,9 @@ ros2 topic echo <input_topic> --field fields
 
 You can download the onnx format of trained models by clicking on the links below.
 
-- TransFusion: [transfusion.onnx](https://awf.ml.dev.web.auto/perception/models/transfusion/v1/transfusion.onnx)
+- TransFusion: [transfusion.onnx](https://awf.ml.dev.web.auto/perception/models/transfusion/t4xx1_90m/v2/transfusion.onnx)
 
-The model was trained in TIER IV's internal database (~11k lidar frames) for 20 epochs.
+The model was trained in TIER IV's internal database (~11k lidar frames) for 50 epochs.
 
 ### Changelog
 
diff --git a/perception/lidar_transfusion/config/transfusion.param.yaml b/perception/lidar_transfusion/config/transfusion.param.yaml
index feabe359caf1f..2c6680fe50af1 100644
--- a/perception/lidar_transfusion/config/transfusion.param.yaml
+++ b/perception/lidar_transfusion/config/transfusion.param.yaml
@@ -4,8 +4,9 @@
     class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
     trt_precision: fp16
     voxels_num: [5000, 30000, 60000] # [min, opt, max]
-    point_cloud_range: [-76.8, -76.8, -3.0, 76.8, 76.8, 5.0] # [x_min, y_min, z_min, x_max, y_max, z_max]
-    voxel_size: [0.3, 0.3, 8.0] # [x, y, z]
+    point_cloud_range: [-92.16, -92.16, -3.0, 92.16, 92.16, 7.0] # [x_min, y_min, z_min, x_max, y_max, z_max]
+    voxel_size: [0.24, 0.24, 10.0] # [x, y, z]
+    num_proposals: 500
     onnx_path: "$(var model_path)/transfusion.onnx"
     engine_path: "$(var model_path)/transfusion.engine"
     # pre-process params
@@ -17,4 +18,4 @@
     iou_nms_search_distance_2d: 10.0
     iou_nms_threshold: 0.1
     yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0] # refers to the class_names
-    score_threshold: 0.2
+    score_threshold: 0.1
diff --git a/perception/lidar_transfusion/include/lidar_transfusion/transfusion_config.hpp b/perception/lidar_transfusion/include/lidar_transfusion/transfusion_config.hpp
index 31976de56a9da..0ad3ab2231f50 100644
--- a/perception/lidar_transfusion/include/lidar_transfusion/transfusion_config.hpp
+++ b/perception/lidar_transfusion/include/lidar_transfusion/transfusion_config.hpp
@@ -26,8 +26,9 @@ class TransfusionConfig
 public:
   TransfusionConfig(
     const std::vector<int64_t> & voxels_num, const std::vector<double> & point_cloud_range,
-    const std::vector<double> & voxel_size, const float circle_nms_dist_threshold,
-    const std::vector<double> & yaw_norm_thresholds, const float score_threshold)
+    const std::vector<double> & voxel_size, const int num_proposals,
+    const float circle_nms_dist_threshold, const std::vector<double> & yaw_norm_thresholds,
+    const float score_threshold)
   {
     if (voxels_num.size() == 3) {
       max_voxels_ = voxels_num[2];
@@ -61,6 +62,9 @@ class TransfusionConfig
       voxel_y_size_ = static_cast<float>(voxel_size[1]);
       voxel_z_size_ = static_cast<float>(voxel_size[2]);
     }
+    if (num_proposals > 0) {
+      num_proposals_ = num_proposals;
+    }
     if (score_threshold > 0.0) {
       score_threshold_ = score_threshold;
     }
@@ -76,6 +80,9 @@ class TransfusionConfig
     grid_x_size_ = static_cast<std::size_t>((max_x_range_ - min_x_range_) / voxel_x_size_);
     grid_y_size_ = static_cast<std::size_t>((max_y_range_ - min_y_range_) / voxel_y_size_);
     grid_z_size_ = static_cast<std::size_t>((max_z_range_ - min_z_range_) / voxel_z_size_);
+
+    feature_x_size_ = grid_x_size_ / out_size_factor_;
+    feature_y_size_ = grid_y_size_ / out_size_factor_;
   }
 
   ///// INPUT PARAMETERS /////
@@ -107,7 +114,7 @@ class TransfusionConfig
   const std::size_t out_size_factor_{4};
   const std::size_t max_num_points_per_pillar_{points_per_voxel_};
   const std::size_t num_point_values_{4};
-  const std::size_t num_proposals_{200};
+  std::size_t num_proposals_{200};
   // the number of feature maps for pillar scatter
   const std::size_t num_feature_scatter_{pillar_feature_size_};
   // the score threshold for classification
diff --git a/perception/lidar_transfusion/schema/transfusion.schema.json b/perception/lidar_transfusion/schema/transfusion.schema.json
index 41d8d887236a8..7debc0edda6fb 100644
--- a/perception/lidar_transfusion/schema/transfusion.schema.json
+++ b/perception/lidar_transfusion/schema/transfusion.schema.json
@@ -61,6 +61,12 @@
           "default": "$(var model_path)/transfusion.engine",
           "pattern": "\\.engine$"
         },
+        "num_proposals": {
+          "type": "integer",
+          "description": "Number of proposals at TransHead.",
+          "default": 500,
+          "minimum": 1
+        },
         "down_sample_factor": {
           "type": "integer",
           "description": "A scale factor of downsampling points",
diff --git a/perception/lidar_transfusion/src/lidar_transfusion_node.cpp b/perception/lidar_transfusion/src/lidar_transfusion_node.cpp
index e3ea6b3780de8..7f5833e60d6d0 100644
--- a/perception/lidar_transfusion/src/lidar_transfusion_node.cpp
+++ b/perception/lidar_transfusion/src/lidar_transfusion_node.cpp
@@ -31,6 +31,7 @@ LidarTransfusionNode::LidarTransfusionNode(const rclcpp::NodeOptions & options)
   const auto point_cloud_range =
     this->declare_parameter<std::vector<double>>("point_cloud_range", descriptor);
   const auto voxel_size = this->declare_parameter<std::vector<double>>("voxel_size", descriptor);
+  const int num_proposals = (this->declare_parameter<int>("num_proposals", descriptor));
   const std::string onnx_path = this->declare_parameter<std::string>("onnx_path", descriptor);
   const std::string engine_path = this->declare_parameter<std::string>("engine_path", descriptor);
 
@@ -73,8 +74,8 @@ LidarTransfusionNode::LidarTransfusionNode(const rclcpp::NodeOptions & options)
   DensificationParam densification_param(
     densification_world_frame_id, densification_num_past_frames);
   TransfusionConfig config(
-    voxels_num, point_cloud_range, voxel_size, circle_nms_dist_threshold, yaw_norm_thresholds,
-    score_threshold);
+    voxels_num, point_cloud_range, voxel_size, num_proposals, circle_nms_dist_threshold,
+    yaw_norm_thresholds, score_threshold);
 
   const auto allow_remapping_by_area_matrix =
     this->declare_parameter<std::vector<int64_t>>("allow_remapping_by_area_matrix", descriptor);
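
For reference, below is a minimal, illustrative sketch (not part of the patch) of how the new `point_cloud_range` and `voxel_size` values propagate to the grid size and to the BEV feature-map size introduced by the `feature_x_size_` / `feature_y_size_` lines above. It assumes float arithmetic and the unchanged `out_size_factor_` of 4; the local variable names only stand in for the corresponding `TransfusionConfig` members.

```cpp
// Illustrative only: recomputes the grid and feature-map sizes implied by the
// updated transfusion.param.yaml values (float arithmetic assumed, as in the config class).
#include <cstddef>
#include <iostream>

int main()
{
  const float min_x_range = -92.16f, max_x_range = 92.16f;  // point_cloud_range x
  const float min_y_range = -92.16f, max_y_range = 92.16f;  // point_cloud_range y
  const float voxel_x_size = 0.24f, voxel_y_size = 0.24f;   // voxel_size x, y
  const std::size_t out_size_factor = 4;                    // unchanged by this patch

  // Same expressions as in the TransfusionConfig constructor shown in the diff.
  const auto grid_x = static_cast<std::size_t>((max_x_range - min_x_range) / voxel_x_size);  // 768
  const auto grid_y = static_cast<std::size_t>((max_y_range - min_y_range) / voxel_y_size);  // 768
  const std::size_t feature_x = grid_x / out_size_factor;  // 192
  const std::size_t feature_y = grid_y / out_size_factor;  // 192

  std::cout << "grid: " << grid_x << " x " << grid_y
            << ", feature map: " << feature_x << " x " << feature_y << std::endl;
}
```

For comparison, the previous values (a ±76.8 m range with 0.3 m voxels) correspond to a 512 × 512 grid and a 128 × 128 feature map, so the new configuration both extends the detection range and uses finer BEV cells.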