From cc0b1081a74a265a3dac05d5830610242aa0ecd3 Mon Sep 17 00:00:00 2001 From: Kotaro Uetake <60615504+ktro2828@users.noreply.github.com> Date: Fri, 15 Dec 2023 16:41:48 +0900 Subject: [PATCH] refactor(image_projection_based_fusion): add JSON Schema and remove default value spefications (#4902) * refactor: add JSON Schema and remove default values in `declare_parameter()` Signed-off-by: ktro2828 * refactor: update configuration file Signed-off-by: ktro2828 * refactor: add configuration file and update launcher to load this Signed-off-by: ktro2828 * refactor: update funsion node configuration Signed-off-by: ktro2828 * docs: update the document for roi cluster fusion Signed-off-by: ktro2828 * docs: update documents Signed-off-by: ktro2828 * refactor: move `debug_mode` into `roi_sync.param.yaml` Signed-off-by: ktro2828 * refactor: rework parameters for `roi_pointcloud_fusion` Signed-off-by: ktro2828 * chore: update maintainers Signed-off-by: ktro2828 * refactor: remove debug_mode Signed-off-by: ktro2828 * refactor: rename parameter to avoid failure of spell-check Signed-off-by: ktro2828 * fix: fix typo and parameters for initialization Signed-off-by: ktro2828 --------- Signed-off-by: ktro2828 --- .../config/pointpainting.param.yaml | 40 +- .../config/roi_cluster_fusion.param.yaml | 12 + .../config/roi_pointcloud_fusion.param.yaml | 5 + .../config/roi_sync.param.yaml | 8 + .../images/roi_cluster_fusion_pipeline.svg | 966 ++++++++++++++++++ .../docs/roi-cluster-fusion.md | 5 + .../launch/pointpainting_fusion.launch.xml | 7 - .../launch/roi_cluster_fusion.launch.xml | 36 +- .../roi_detected_object_fusion.launch.xml | 6 - .../launch/roi_pointcloud_fusion.launch.xml | 8 +- .../image_projection_based_fusion/package.xml | 2 + .../schema/pointpainting.schema.json | 152 +++ .../schema/roi_cluster_fusion.schema.json | 96 ++ .../roi_detected_object_fusion.schema.json | 70 ++ .../schema/roi_pointcloud_fusion.schema.json | 41 + .../schema/roi_sync.schema.json | 84 ++ .../src/fusion_node.cpp | 19 +- .../src/pointpainting_fusion/node.cpp | 60 +- 18 files changed, 1510 insertions(+), 107 deletions(-) create mode 100644 perception/image_projection_based_fusion/config/roi_cluster_fusion.param.yaml create mode 100644 perception/image_projection_based_fusion/config/roi_pointcloud_fusion.param.yaml create mode 100644 perception/image_projection_based_fusion/docs/images/roi_cluster_fusion_pipeline.svg create mode 100644 perception/image_projection_based_fusion/schema/pointpainting.schema.json create mode 100644 perception/image_projection_based_fusion/schema/roi_cluster_fusion.schema.json create mode 100644 perception/image_projection_based_fusion/schema/roi_detected_object_fusion.schema.json create mode 100644 perception/image_projection_based_fusion/schema/roi_pointcloud_fusion.schema.json create mode 100644 perception/image_projection_based_fusion/schema/roi_sync.schema.json diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml index e1be5426cba4b..21d31f216373b 100755 --- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml +++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml @@ -1,18 +1,26 @@ /**: ros__parameters: - class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"] - paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"] - point_feature_size: 7 # x, y, z, time-lag and car, pedestrian, bicycle - max_voxel_size: 40000 - point_cloud_range: [-121.6, 
-76.8, -3.0, 121.6, 76.8, 5.0] - voxel_size: [0.32, 0.32, 8.0] - downsample_factor: 1 - encoder_in_feature_size: 12 - yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0] - # post-process params - circle_nms_dist_threshold: 0.3 - iou_nms_target_class_names: ["CAR"] - iou_nms_search_distance_2d: 10.0 - iou_nms_threshold: 0.1 - # omp params - omp_num_threads: 1 + model_params: + class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"] + paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"] + point_feature_size: 7 # x, y, z, time-lag and car, pedestrian, bicycle + max_voxel_size: 40000 + point_cloud_range: [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0] + voxel_size: [0.32, 0.32, 8.0] + downsample_factor: 1 + encoder_in_feature_size: 12 + yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0] + has_twist: false + densification_params: + world_frame_id: "map" + num_past_frames: 0 + post_process_params: + # post-process params + circle_nms_dist_threshold: 0.3 + iou_nms_target_class_names: ["CAR"] + iou_nms_search_distance_2d: 10.0 + iou_nms_threshold: 0.1 + score_threshold: 0.4 + omp_params: + # omp params + num_threads: 1 diff --git a/perception/image_projection_based_fusion/config/roi_cluster_fusion.param.yaml b/perception/image_projection_based_fusion/config/roi_cluster_fusion.param.yaml new file mode 100644 index 0000000000000..90ba841d53b2d --- /dev/null +++ b/perception/image_projection_based_fusion/config/roi_cluster_fusion.param.yaml @@ -0,0 +1,12 @@ +/**: + ros__parameters: + fusion_distance: 100.0 + trust_object_distance: 100.0 + trust_object_iou_mode: "iou" + non_trust_object_iou_mode: "iou_x" + use_cluster_semantic_type: false + only_allow_inside_cluster: true + roi_scale_factor: 1.1 + iou_threshold: 0.65 + unknown_iou_threshold: 0.1 + remove_unknown: true diff --git a/perception/image_projection_based_fusion/config/roi_pointcloud_fusion.param.yaml b/perception/image_projection_based_fusion/config/roi_pointcloud_fusion.param.yaml new file mode 100644 index 0000000000000..5b86b8e81d1aa --- /dev/null +++ b/perception/image_projection_based_fusion/config/roi_pointcloud_fusion.param.yaml @@ -0,0 +1,5 @@ +/**: + ros__parameters: + fuse_unknown_only: true + min_cluster_size: 2 + cluster_2d_tolerance: 0.5 diff --git a/perception/image_projection_based_fusion/config/roi_sync.param.yaml b/perception/image_projection_based_fusion/config/roi_sync.param.yaml index 21ba13787f1c0..99d85089befb8 100644 --- a/perception/image_projection_based_fusion/config/roi_sync.param.yaml +++ b/perception/image_projection_based_fusion/config/roi_sync.param.yaml @@ -3,3 +3,11 @@ input_offset_ms: [61.67, 111.67, 45.0, 28.33, 78.33, 95.0] timeout_ms: 70.0 match_threshold_ms: 50.0 + image_buffer_size: 15 + debug_mode: false + filter_scope_min_x: -100.0 + filter_scope_min_y: -100.0 + filter_scope_min_z: -100.0 + filter_scope_max_x: 100.0 + filter_scope_max_y: 100.0 + filter_scope_max_z: 100.0 diff --git a/perception/image_projection_based_fusion/docs/images/roi_cluster_fusion_pipeline.svg b/perception/image_projection_based_fusion/docs/images/roi_cluster_fusion_pipeline.svg new file mode 100644 index 0000000000000..aaadfaf186dd2 --- /dev/null +++ b/perception/image_projection_based_fusion/docs/images/roi_cluster_fusion_pipeline.svg @@ -0,0 +1,966 @@ + + + + + + + + + + +
+ [roi_cluster_fusion_pipeline.svg: SVG markup omitted; the diagram's recoverable text is summarized below.]
+ Pipeline: Input Cluster -> Pre-Process (use_cluster_semantic_type: if disabled, all objects are reset to the UNKNOWN label; trust_distance: is the cluster closer than trust_distance?) -> Fusion Process (scale the detected RoI by roi_scale_factor (>= 1.0); with only_allow_inside_cluster, check whether the cluster RoI (inner) is fully contained within the scaled detected RoI (outer); otherwise compute the IoU score (use_iou, use_iou_x / y) and compare it against iou_threshold, or against unknown_iou_threshold for UNKNOWN objects) -> Fused Cluster / Unfused Cluster -> Post-Process (remove_unknown: remove noise clusters that are undetected by the RoI detector, such as fog and exhaust gas; keep clusters that are KNOWN objects and have an RoI probability of at least 0.1) -> Output Fused Cluster.
+ IoU definitions: IoU = Intersection Area / Union Area; IoU X = Intersection Width / Union Width; IoU Y = Intersection Height / Union Height.
+ Notes from the diagram: because the cluster RoI is calculated from the points projected onto the image, the recommended roi_scale_factor is in [1.1, 1.5]; otherwise set only_allow_inside_cluster to false. The default iou_threshold is 0.65 with only_allow_inside_cluster set to true; try a larger threshold with only_allow_inside_cluster set to false, depending on your 2D RoI detector's performance.
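The three IoU variants named in the diagram reduce to simple area and interval ratios over axis-aligned RoIs. The following is a minimal sketch of those definitions, assuming axis-aligned boxes; the Roi2d struct and the helper names are illustrative and are not taken from the node's source.

// Illustrative-only helpers for the IoU modes shown in the diagram above.
#include <algorithm>

struct Roi2d
{
  double x_min, y_min, x_max, y_max;
};

// IoU = Intersection Area / Union Area
inline double iou(const Roi2d & a, const Roi2d & b)
{
  const double iw = std::max(0.0, std::min(a.x_max, b.x_max) - std::max(a.x_min, b.x_min));
  const double ih = std::max(0.0, std::min(a.y_max, b.y_max) - std::max(a.y_min, b.y_min));
  const double inter = iw * ih;
  const double uni = (a.x_max - a.x_min) * (a.y_max - a.y_min) +
                     (b.x_max - b.x_min) * (b.y_max - b.y_min) - inter;
  return uni > 0.0 ? inter / uni : 0.0;
}

// IoU X = Intersection Width / Union Width
inline double iou_x(const Roi2d & a, const Roi2d & b)
{
  const double inter_w = std::max(0.0, std::min(a.x_max, b.x_max) - std::max(a.x_min, b.x_min));
  const double union_w = std::max(a.x_max, b.x_max) - std::min(a.x_min, b.x_min);
  return union_w > 0.0 ? inter_w / union_w : 0.0;
}

// IoU Y = Intersection Height / Union Height
inline double iou_y(const Roi2d & a, const Roi2d & b)
{
  const double inter_h = std::max(0.0, std::min(a.y_max, b.y_max) - std::max(a.y_min, b.y_min));
  const double union_h = std::max(a.y_max, b.y_max) - std::min(a.y_min, b.y_min);
  return union_h > 0.0 ? inter_h / union_h : 0.0;
}

Whichever mode trust_object_iou_mode or non_trust_object_iou_mode selects, the resulting score is compared against iou_threshold, or against unknown_iou_threshold for objects labeled UNKNOWN.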
diff --git a/perception/image_projection_based_fusion/docs/roi-cluster-fusion.md b/perception/image_projection_based_fusion/docs/roi-cluster-fusion.md index 03eaab2a3c6ca..86d3a2fa070b2 100644 --- a/perception/image_projection_based_fusion/docs/roi-cluster-fusion.md +++ b/perception/image_projection_based_fusion/docs/roi-cluster-fusion.md @@ -30,6 +30,11 @@ The clusters are projected onto image planes, and then if the ROIs of clusters a ## Parameters +The following figure is an inner pipeline overview of RoI cluster fusion node. +Please refer to it for your parameter settings. + +![roi_cluster_fusion_pipeline](./images/roi_cluster_fusion_pipeline.svg) + ### Core Parameters | Name | Type | Description | diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml index e15737f5ed222..33781461fa1cc 100644 --- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml +++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml @@ -30,7 +30,6 @@ - @@ -43,9 +42,6 @@ - - - @@ -82,9 +78,6 @@ - - - diff --git a/perception/image_projection_based_fusion/launch/roi_cluster_fusion.launch.xml b/perception/image_projection_based_fusion/launch/roi_cluster_fusion.launch.xml index 60f6f943b8cda..52dd71e9579c1 100644 --- a/perception/image_projection_based_fusion/launch/roi_cluster_fusion.launch.xml +++ b/perception/image_projection_based_fusion/launch/roi_cluster_fusion.launch.xml @@ -18,24 +18,11 @@ + - - - - - - - - - - - - - - @@ -46,17 +33,8 @@ - - - - - - - - - - + @@ -86,16 +64,6 @@ - - - - - - - - - - diff --git a/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml b/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml index b6165fc7c87d2..c9da81af9ddb0 100644 --- a/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml +++ b/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml @@ -25,8 +25,6 @@ - - @@ -69,10 +67,6 @@ - - - - diff --git a/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml b/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml index 181f4ccb88320..046d88d06e2a1 100644 --- a/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml +++ b/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml @@ -1,8 +1,5 @@ - - - @@ -23,6 +20,7 @@ + @@ -38,9 +36,7 @@ - - - + diff --git a/perception/image_projection_based_fusion/package.xml b/perception/image_projection_based_fusion/package.xml index 1648de210ec2c..49ff4dafc7900 100644 --- a/perception/image_projection_based_fusion/package.xml +++ b/perception/image_projection_based_fusion/package.xml @@ -9,6 +9,8 @@ Shunsuke Miura Yoshi Ri badai nguyen + Kotaro Uetake + Tao Zhong Apache License 2.0 ament_cmake_auto diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json new file mode 100644 index 0000000000000..036628d72e70a --- /dev/null +++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json @@ -0,0 +1,152 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for Point Painting Fusion Node", + "type": "object", + "definitions": { + "pointpainting": { + "type": "object", + "properties": { + 
"model_params": { + "type": "object", + "description": "Parameters for model configuration.", + "properties": { + "class_names": { + "type": "array", + "description": "An array of class names will be predicted.", + "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"], + "uniqueItems": true + }, + "paint_class_names": { + "type": "array", + "description": "An array of class names will be painted by PointPainting", + "default": ["CAR", "BICYCLE", "PEDESTRIAN"], + "uniqueItems": true + }, + "point_feature_size": { + "type": "integer", + "description": "A number of channels of point feature layer.", + "default": 7 + }, + "max_voxel_size": { + "type": "integer", + "description": "A maximum size of voxel grid.", + "default": 40000 + }, + "point_cloud_range": { + "type": "array", + "description": "An array of distance ranges of each class, this must have same length with `class_names`.", + "default": [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0] + }, + "voxel_size": { + "type": "array", + "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.", + "default": [0.32, 0.32, 8.0] + }, + "down_sample_factor": { + "type": "integer", + "description": "A scale factor of downsampling points", + "default": 1, + "minimum": 1 + }, + "encoder_in_feature_size": { + "type": "integer", + "description": "A size of encoder input feature channels.", + "default": 12 + }, + "yaw_norm_thresholds": { + "type": "array", + "description": "An array of distance threshold values of norm of yaw [rad].", + "default": [0.3, 0.3, 0.3, 0.3, 0.0], + "minimum": 0.0, + "maximum": 1.0 + }, + "has_twist": { + "type": "boolean", + "description": "Indicates whether the model outputs twist value.", + "default": false + } + } + }, + "densification_params": { + "type": "object", + "description": "Parameters for pointcloud densification.", + "properties": { + "world_frame_id": { + "type": "string", + "description": "A name of frame id where world coordinates system is defined with respect to.", + "default": "map" + }, + "num_past_frames": { + "type": "integer", + "description": "A number of past frames to be considered as same input frame.", + "default": 0, + "minimum": 0 + } + } + }, + "post_process_params": { + "type": "object", + "properties": { + "score_threshold": { + "type": "number", + "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.", + "default": 0.4, + "minimum": 0.0, + "maximum": 1.0 + }, + "circle_nms_dist_threshold": { + "type": "number", + "description": "", + "default": 0.3, + "minimum": 0.0, + "maximum": 1.0 + }, + "iou_nms_target_class_names": { + "type": "array", + "description": "An array of class names to be target in NMS.", + "default": ["CAR"], + "uniqueItems": true + }, + "iou_search_distance_2d": { + "type": "number", + "description": "A maximum distance value to search the nearest objects.", + "default": 10.0, + "minimum": 0.0 + }, + "iou_nms_threshold": { + "type": "number", + "description": "A threshold value of NMS using IoU score.", + "default": 0.1, + "minimum": 0.0, + "maximum": 1.0 + } + } + }, + "omp_params": { + "type": "object", + "properties": { + "num_threads": { + "type": "integer", + "description": "The number of threads that is set to the environment variable OMP_NUM_THREADS.", + "default": 1, + "minimum": 1 + } + } + } + }, + "required": ["model_params", "densification_params", "post_process_params", "omp_params"] + } + }, + "properties": { + "/**": { + 
"type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/pointpainting" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/perception/image_projection_based_fusion/schema/roi_cluster_fusion.schema.json b/perception/image_projection_based_fusion/schema/roi_cluster_fusion.schema.json new file mode 100644 index 0000000000000..fc32e9d6d3d8b --- /dev/null +++ b/perception/image_projection_based_fusion/schema/roi_cluster_fusion.schema.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for RoI Cluster Fusion Node", + "type": "object", + "definitions": { + "roi_cluster_fusion": { + "type": "object", + "properties": { + "fusion_distance": { + "type": "number", + "description": "If the detected object's distance is less than its value, the fusion will be processed.", + "default": 100.0, + "minimum": 0.0 + }, + "trust_object_distance": { + "type": "number", + "description": "If the detected object's distance is less than its value, IoU method specified in `trust_object_iou_mode` is used, otherwise `non_trust_object_iou_mode` is used.", + "default": 100.0, + "minimum": 0.0 + }, + "trust_object_iou_mode": { + "type": "string", + "description": "Name of IoU method applied to the objects in range of [0.0, `trust_distance`].", + "default": "iou", + "enum": ["iou", "iou_x", "iou_y"] + }, + "non_trust_object_iou_mode": { + "type": "string", + "description": "Name of IoU method applied to the objects in range of [`trust_distance`, `fusion_distance`], if `trust_distance` < `fusion_distance`.", + "default": "iou_x", + "enum": ["iou", "iou_x", "iou_y"] + }, + "use_cluster_semantic_type": { + "type": "boolean", + "description": "If this parameter is false, label of cluster objects will be reset to UNKNOWN.", + "default": false + }, + "only_allow_inside_cluster": { + "type": "boolean", + "description": "If this parameter is true, only clusters in which all their points are inside the RoI can be assigned to the RoI.", + "default": true + }, + "roi_scale_factor": { + "type": "number", + "description": "A scale factor for resizing RoI while checking if cluster points are inside the RoI.", + "default": 1.1, + "minimum": 1.0, + "maximum": 2.0 + }, + "iou_threshold": { + "type": "number", + "description": "An IoU score threshold. 
Note that the total IoU score is the sum of the IoU scores that are set to true in use_iou, use_iou_x and use_iou_y.", + "default": 0.65, + "minimum": 0.0, + "maximum": 1.0 + }, + "unknown_iou_threshold": { + "type": "number", + "description": "A threshold value of IoU score for objects labeled UNKNOWN.", + "default": 0.1, + "minimum": 0.0, + "maximum": 1.0 + }, + "remove_unknown": { + "type": "boolean", + "description": "If this parameter is true, all of objects labeled UNKNOWN will be removed in post-process.", + "default": false + } + }, + "required": [ + "fusion_distance", + "trust_object_distance", + "trust_object_iou_mode", + "non_trust_object_iou_mode", + "use_cluster_semantic_type", + "only_allow_inside_cluster", + "roi_scale_factor", + "iou_threshold", + "unknown_iou_threshold", + "remove_unknown" + ] + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/roi_cluster_fusion" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/perception/image_projection_based_fusion/schema/roi_detected_object_fusion.schema.json b/perception/image_projection_based_fusion/schema/roi_detected_object_fusion.schema.json new file mode 100644 index 0000000000000..3030be1305d56 --- /dev/null +++ b/perception/image_projection_based_fusion/schema/roi_detected_object_fusion.schema.json @@ -0,0 +1,70 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for RoI Detected Object Fusion Node", + "type": "object", + "definitions": { + "roi_detected_object_fusion": { + "type": "object", + "properties": { + "passthrough_lower_bound_probability_thresholds": { + "type": "array", + "description": "An array of object probability thresholds. The objects that have higher probability than their respective thresholds are kept.", + "default": [0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.5] + }, + "trust_distances": { + "type": "array", + "description": "An array of object distances thresholds. Any objects that is farther than this value will be skipped in the clustering process, but will still be published.", + "default": [50.0, 100.0, 100.0, 100.0, 100.0, 50.0, 50.0, 50.0] + }, + "min_iou_threshold": { + "type": "number", + "description": "An Iou score threshold.", + "default": 0.5, + "minimum": 0.0, + "maximum": 1.0 + }, + "roi_probability_threshold": { + "type": "number", + "description": "A object probability threshold.", + "default": 0.5, + "minimum": 0.0, + "maximum": 1.0 + }, + "use_roi_probability": { + "type": "boolean", + "description": "If this parameter is true, the objects are filtered out with their RoI probabilities.", + "default": false + }, + "can_assign_matrix": { + "type": "array", + "description": "An NxN matrix, where N represents the number of classes. 
A value 1 indicates that it is assignable, while a value of 0 indicates not.", + "default": [ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, + 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, + 0, 0, 0, 1, 1, 1 + ] + } + }, + "required": [ + "passthrough_lower_bound_probability_thresholds", + "trust_distances", + "min_iou_threshold", + "roi_probability_threshold", + "use_roi_probability", + "can_assign_matrix" + ] + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/roi_detected_object_fusion" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/perception/image_projection_based_fusion/schema/roi_pointcloud_fusion.schema.json b/perception/image_projection_based_fusion/schema/roi_pointcloud_fusion.schema.json new file mode 100644 index 0000000000000..f39ef257ea789 --- /dev/null +++ b/perception/image_projection_based_fusion/schema/roi_pointcloud_fusion.schema.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for RoI PointCloud Fusion Node", + "type": "object", + "definitions": { + "roi_pointcloud_fusion": { + "type": "object", + "properties": { + "fuse_unknown_only": { + "type": "boolean", + "description": "Whether to fuse only UNKNOWN clusters.", + "default": true + }, + "min_cluster_size": { + "type": "integer", + "description": "The minimum number of points that a cluster must contain to be considered as valid.", + "default": 2 + }, + "cluster_2d_tolerance": { + "type": "number", + "description": "A cluster tolerance measured in radial direction [m]", + "default": 0.5, + "exclusiveMinimum": 0.0 + } + }, + "required": ["fuse_unknown_only", "min_cluster_size", "cluster_2d_tolerance"] + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/roi_pointcloud_fusion" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/perception/image_projection_based_fusion/schema/roi_sync.schema.json b/perception/image_projection_based_fusion/schema/roi_sync.schema.json new file mode 100644 index 0000000000000..411fb678a49a7 --- /dev/null +++ b/perception/image_projection_based_fusion/schema/roi_sync.schema.json @@ -0,0 +1,84 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for Synchronization of RoI Fusion Nodes", + "type": "object", + "definitions": { + "roi_sync": { + "type": "object", + "properties": { + "input_offset_ms": { + "type": "array", + "description": "An array of timestamp offsets for each camera [ms].", + "default": [61.67, 111.67, 45.0, 28.33, 78.33, 95.0] + }, + "timeout_ms": { + "type": "number", + "description": "A timeout value can be assigned within a single frame [ms].", + "default": 70.0, + "minimum": 1.0, + "maximum": 100.0 + }, + "match_threshold_ms": { + "type": "number", + "description": "A maximum threshold value to synchronize RoIs from multiple cameras [ms].", + "default": 50.0, + "minimum": 0.0, + "maximum": 100.0 + }, + "image_buffer_size": { + "type": "integer", + "description": "The number of image buffer size for debug.", + "default": 15, + "minimum": 1 + }, + "debug_mode": { + "type": "boolean", + "description": "Whether to debug or not.", + "default": false + }, + "filter_scope_min_x": { + "type": "number", + "description": "Minimum x position to be considered for debug [m].", + "default": -100.0 
+ }, + "filter_scope_min_y": { + "type": "number", + "description": "Minimum y position to be considered for debug [m].", + "default": -100.0 + }, + "filter_scope_min_z": { + "type": "number", + "description": "Minimum z position to be considered for debug [m].", + "default": -100.0 + }, + "filter_scope_max_x": { + "type": "number", + "description": "Maximum x position to be considered for debug [m].", + "default": 100.0 + }, + "filter_scope_max_y": { + "type": "number", + "description": "Maximum y position to be considered for debug [m].", + "default": 100.0 + }, + "filter_scope_max_z": { + "type": "number", + "description": "Maximum z position [m].", + "default": 100.0 + } + } + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/roi_sync" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/perception/image_projection_based_fusion/src/fusion_node.cpp b/perception/image_projection_based_fusion/src/fusion_node.cpp index b01a910aaded1..4abd92d7fb063 100644 --- a/perception/image_projection_based_fusion/src/fusion_node.cpp +++ b/perception/image_projection_based_fusion/src/fusion_node.cpp @@ -46,7 +46,7 @@ FusionNode::FusionNode( : Node(node_name, options), tf_buffer_(this->get_clock()), tf_listener_(tf_buffer_) { // set rois_number - rois_number_ = static_cast(declare_parameter("rois_number", 1)); + rois_number_ = static_cast(declare_parameter("rois_number")); if (rois_number_ < 1) { RCLCPP_WARN( this->get_logger(), "minimum rois_number is 1. current rois_number is %zu", rois_number_); @@ -80,7 +80,7 @@ FusionNode::FusionNode( "/sensing/camera/camera" + std::to_string(roi_i) + "/image_rect_color"); } - input_offset_ms_ = declare_parameter("input_offset_ms", std::vector{}); + input_offset_ms_ = declare_parameter>("input_offset_ms"); if (!input_offset_ms_.empty() && rois_number_ != input_offset_ms_.size()) { throw std::runtime_error("The number of offsets does not match the number of topics."); } @@ -122,7 +122,7 @@ FusionNode::FusionNode( // debugger if (declare_parameter("debug_mode", false)) { std::size_t image_buffer_size = - static_cast(declare_parameter("image_buffer_size", 15)); + static_cast(declare_parameter("image_buffer_size")); debugger_ = std::make_shared(this, rois_number_, image_buffer_size, input_camera_topics_); } @@ -136,14 +136,15 @@ FusionNode::FusionNode( stop_watch_ptr_->tic("cyclic_time"); stop_watch_ptr_->tic("processing_time"); } + // cspell: ignore minx, maxx, miny, maxy, minz, maxz // FIXME: use min_x instead of minx - filter_scope_minx_ = declare_parameter("filter_scope_minx", -100); - filter_scope_maxx_ = declare_parameter("filter_scope_maxx", 100); - filter_scope_miny_ = declare_parameter("filter_scope_miny", -100); - filter_scope_maxy_ = declare_parameter("filter_scope_maxy", 100); - filter_scope_minz_ = declare_parameter("filter_scope_minz", -100); - filter_scope_maxz_ = declare_parameter("filter_scope_maxz", 100); + filter_scope_minx_ = declare_parameter("filter_scope_min_x"); + filter_scope_maxx_ = declare_parameter("filter_scope_max_x"); + filter_scope_miny_ = declare_parameter("filter_scope_min_y"); + filter_scope_maxy_ = declare_parameter("filter_scope_max_y"); + filter_scope_minz_ = declare_parameter("filter_scope_min_z"); + filter_scope_maxz_ = declare_parameter("filter_scope_max_z"); } template diff --git a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp 
b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp index 48ef3d26806c8..76b561f677c0f 100644 --- a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp +++ b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp @@ -95,28 +95,29 @@ inline bool isUnknown(int label2d) PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & options) : FusionNode("pointpainting_fusion", options) { - omp_num_threads_ = this->declare_parameter("omp_num_threads", 1); + omp_num_threads_ = this->declare_parameter("omp_params.num_threads"); const float score_threshold = - static_cast(this->declare_parameter("score_threshold", 0.4)); - const float circle_nms_dist_threshold = - static_cast(this->declare_parameter("circle_nms_dist_threshold", 1.5)); + static_cast(this->declare_parameter("post_process_params.score_threshold")); + const float circle_nms_dist_threshold = static_cast( + this->declare_parameter("post_process_params.circle_nms_dist_threshold")); const auto yaw_norm_thresholds = - this->declare_parameter>("yaw_norm_thresholds"); + this->declare_parameter>("model_params.yaw_norm_thresholds"); // densification param const std::string densification_world_frame_id = - this->declare_parameter("densification_world_frame_id", "map"); + this->declare_parameter("densification_params.world_frame_id"); const int densification_num_past_frames = - this->declare_parameter("densification_num_past_frames", 0); + this->declare_parameter("densification_params.num_past_frames"); // network param - const std::string trt_precision = this->declare_parameter("trt_precision", "fp16"); - const std::string encoder_onnx_path = this->declare_parameter("encoder_onnx_path", ""); - const std::string encoder_engine_path = this->declare_parameter("encoder_engine_path", ""); - const std::string head_onnx_path = this->declare_parameter("head_onnx_path", ""); - const std::string head_engine_path = this->declare_parameter("head_engine_path", ""); - - class_names_ = this->declare_parameter>("class_names"); + const std::string trt_precision = this->declare_parameter("trt_precision"); + const std::string encoder_onnx_path = this->declare_parameter("encoder_onnx_path"); + const std::string encoder_engine_path = + this->declare_parameter("encoder_engine_path"); + const std::string head_onnx_path = this->declare_parameter("head_onnx_path"); + const std::string head_engine_path = this->declare_parameter("head_engine_path"); + + class_names_ = this->declare_parameter>("model_params.class_names"); const auto paint_class_names = - this->declare_parameter>("paint_class_names"); + this->declare_parameter>("model_params.paint_class_names"); std::vector classes_{"CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"}; if ( std::find(paint_class_names.begin(), paint_class_names.end(), "TRUCK") != @@ -138,17 +139,17 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt isClassTable_.erase(cls); } } - has_twist_ = this->declare_parameter("has_twist", false); - const std::size_t point_feature_size = - static_cast(this->declare_parameter("point_feature_size")); + has_twist_ = this->declare_parameter("model_params.has_twist"); + const std::size_t point_feature_size = static_cast( + this->declare_parameter("model_params.point_feature_size")); const std::size_t max_voxel_size = - static_cast(this->declare_parameter("max_voxel_size")); - pointcloud_range = this->declare_parameter>("point_cloud_range"); - const auto voxel_size = 
this->declare_parameter>("voxel_size"); - const std::size_t downsample_factor = - static_cast(this->declare_parameter("downsample_factor")); - const std::size_t encoder_in_feature_size = - static_cast(this->declare_parameter("encoder_in_feature_size")); + static_cast(this->declare_parameter("model_params.max_voxel_size")); + pointcloud_range = this->declare_parameter>("model_params.point_cloud_range"); + const auto voxel_size = this->declare_parameter>("model_params.voxel_size"); + const std::size_t downsample_factor = static_cast( + this->declare_parameter("model_params.downsample_factor")); + const std::size_t encoder_in_feature_size = static_cast( + this->declare_parameter("model_params.encoder_in_feature_size")); const auto allow_remapping_by_area_matrix = this->declare_parameter>("allow_remapping_by_area_matrix"); const auto min_area_matrix = this->declare_parameter>("min_area_matrix"); @@ -172,10 +173,11 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt { centerpoint::NMSParams p; p.nms_type_ = centerpoint::NMS_TYPE::IoU_BEV; - p.target_class_names_ = - this->declare_parameter>("iou_nms_target_class_names"); - p.search_distance_2d_ = this->declare_parameter("iou_nms_search_distance_2d"); - p.iou_threshold_ = this->declare_parameter("iou_nms_threshold"); + p.target_class_names_ = this->declare_parameter>( + "post_process_params.iou_nms_target_class_names"); + p.search_distance_2d_ = + this->declare_parameter("post_process_params.iou_nms_search_distance_2d"); + p.iou_threshold_ = this->declare_parameter("post_process_params.iou_nms_threshold"); iou_bev_nms_.setParameters(p); }
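With this change every parameter is declared under its namespaced key with no in-code default, so the values in config/pointpainting.param.yaml (validated against schema/pointpainting.schema.json) become the single source of defaults. The following is a minimal sketch of the new convention, using a hypothetical ExampleFusionParams node that reads only a few of the keys touched above; it is not the actual PointPainting node.

#include <rclcpp/rclcpp.hpp>

#include <memory>
#include <string>
#include <vector>

// Sketch of the new convention: declare_parameter() is called without a default,
// so a key missing from the loaded parameter file makes the node fail at startup
// instead of being silently filled with an in-code value.
class ExampleFusionParams : public rclcpp::Node
{
public:
  ExampleFusionParams() : rclcpp::Node("example_fusion_params")
  {
    const auto class_names =
      declare_parameter<std::vector<std::string>>("model_params.class_names");
    const auto score_threshold = declare_parameter<double>("post_process_params.score_threshold");
    const int num_threads =
      static_cast<int>(declare_parameter<int64_t>("omp_params.num_threads"));
    RCLCPP_INFO(
      get_logger(), "loaded %zu classes, score_threshold=%.2f, omp num_threads=%d",
      class_names.size(), score_threshold, num_threads);
  }
};

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  rclcpp::spin(std::make_shared<ExampleFusionParams>());
  rclcpp::shutdown();
  return 0;
}

Such a node would be started with the packaged defaults via the standard ROS 2 mechanism, passing `--ros-args --params-file <path-to>/pointpainting.param.yaml`, which matches the launcher updates in this patch that load the new config files instead of per-parameter launch arguments.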