diff --git a/common/object_recognition_utils/include/object_recognition_utils/transform.hpp b/common/object_recognition_utils/include/object_recognition_utils/transform.hpp index 4446c87427e88..31892853a855f 100644 --- a/common/object_recognition_utils/include/object_recognition_utils/transform.hpp +++ b/common/object_recognition_utils/include/object_recognition_utils/transform.hpp @@ -15,15 +15,22 @@ #ifndef OBJECT_RECOGNITION_UTILS__TRANSFORM_HPP_ #define OBJECT_RECOGNITION_UTILS__TRANSFORM_HPP_ +#include + #include +#include +#include #include #include #include #ifdef ROS_DISTRO_GALACTIC +#include #include #else +#include + #include #endif @@ -45,6 +52,23 @@ namespace detail return boost::none; } } + +[[maybe_unused]] inline boost::optional getTransformMatrix( + const std::string & in_target_frame, const std_msgs::msg::Header & in_cloud_header, + const tf2_ros::Buffer & tf_buffer) +{ + try { + geometry_msgs::msg::TransformStamped transform_stamped; + transform_stamped = tf_buffer.lookupTransform( + in_target_frame, in_cloud_header.frame_id, in_cloud_header.stamp, + rclcpp::Duration::from_seconds(1.0)); + Eigen::Matrix4f mat = tf2::transformToEigen(transform_stamped.transform).matrix().cast(); + return mat; + } catch (tf2::TransformException & e) { + RCLCPP_WARN_STREAM(rclcpp::get_logger("detail::getTransformMatrix"), e.what()); + return boost::none; + } +} } // namespace detail namespace object_recognition_utils @@ -79,6 +103,46 @@ bool transformObjects( } return true; } +template +bool transformObjectsWithFeature( + const T & input_msg, const std::string & target_frame_id, const tf2_ros::Buffer & tf_buffer, + T & output_msg) +{ + output_msg = input_msg; + if (input_msg.header.frame_id != target_frame_id) { + output_msg.header.frame_id = target_frame_id; + tf2::Transform tf_target2objects_world; + tf2::Transform tf_target2objects; + tf2::Transform tf_objects_world2objects; + const auto ros_target2objects_world = detail::getTransform( + tf_buffer, input_msg.header.frame_id, target_frame_id, input_msg.header.stamp); + if (!ros_target2objects_world) { + return false; + } + const auto tf_matrix = detail::getTransformMatrix(target_frame_id, input_msg.header, tf_buffer); + if (!tf_matrix) { + RCLCPP_WARN( + rclcpp::get_logger("object_recognition_utils:"), "failed to get transformed matrix"); + return false; + } + for (auto & feature_object : output_msg.feature_objects) { + // transform object + tf2::fromMsg( + feature_object.object.kinematics.pose_with_covariance.pose, tf_objects_world2objects); + tf_target2objects = tf_target2objects_world * tf_objects_world2objects; + tf2::toMsg(tf_target2objects, feature_object.object.kinematics.pose_with_covariance.pose); + + // transform cluster + sensor_msgs::msg::PointCloud2 transformed_cluster; + pcl_ros::transformPointCloud(*tf_matrix, feature_object.feature.cluster, transformed_cluster); + transformed_cluster.header.frame_id = target_frame_id; + feature_object.feature.cluster = transformed_cluster; + } + output_msg.header.frame_id = target_frame_id; + return true; + } + return true; +} } // namespace object_recognition_utils #endif // OBJECT_RECOGNITION_UTILS__TRANSFORM_HPP_ diff --git a/common/object_recognition_utils/package.xml b/common/object_recognition_utils/package.xml index 95925e846a55c..2f2472515ebad 100644 --- a/common/object_recognition_utils/package.xml +++ b/common/object_recognition_utils/package.xml @@ -17,7 +17,13 @@ geometry_msgs interpolation libboost-dev + pcl_conversions + pcl_ros rclcpp + sensor_msgs + std_msgs + tf2 + 
tf2_eigen tier4_autoware_utils ament_cmake_ros diff --git a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc.hpp b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc.hpp index 0854340ed24a5..186481e92cdc4 100644 --- a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc.hpp +++ b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc.hpp @@ -447,7 +447,8 @@ class MPC * @param param Trajectory filtering parameters. */ void setReferenceTrajectory( - const Trajectory & trajectory_msg, const TrajectoryFilteringParam & param); + const Trajectory & trajectory_msg, const TrajectoryFilteringParam & param, + const Odometry & current_kinematics); /** * @brief Reset the previous result of MPC. diff --git a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_lateral_controller.hpp b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_lateral_controller.hpp index ee33062854ab9..911a39052f4dd 100644 --- a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_lateral_controller.hpp +++ b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_lateral_controller.hpp @@ -181,7 +181,7 @@ class MpcLateralController : public trajectory_follower::LateralControllerBase * @brief Set the current trajectory using the received message. * @param msg Received trajectory message. */ - void setTrajectory(const Trajectory & msg); + void setTrajectory(const Trajectory & msg, const Odometry & current_kinematics); /** * @brief Check if the received data is valid. diff --git a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_utils.hpp b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_utils.hpp index 89136f294aa18..036e6a32a9b83 100644 --- a/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_utils.hpp +++ b/control/mpc_lateral_controller/include/mpc_lateral_controller/mpc_utils.hpp @@ -99,7 +99,8 @@ void calcMPCTrajectoryArcLength(const MPCTrajectory & trajectory, std::vector resampleMPCTrajectoryByDistance( - const MPCTrajectory & input, const double resample_interval_dist); + const MPCTrajectory & input, const double resample_interval_dist, const size_t nearest_seg_idx, + const double ego_offset_to_segment); /** * @brief linearly interpolate the given trajectory assuming a base indexing and a new desired @@ -122,14 +123,14 @@ bool calcMPCTrajectoryTime(MPCTrajectory & traj); /** * @brief recalculate the velocity field (vx) of the MPCTrajectory with dynamic smoothing - * @param [in] start_idx index of the trajectory point from which to start smoothing - * @param [in] start_vel initial velocity to set at the start_idx + * @param [in] start_seg_idx segment index of the trajectory point from which to start smoothing + * @param [in] start_vel initial velocity to set at the start_seg_idx * @param [in] acc_lim limit on the acceleration * @param [in] tau constant to control the smoothing (high-value = very smooth) * @param [inout] traj MPCTrajectory for which to calculate the smoothed velocity */ void dynamicSmoothingVelocity( - const size_t start_idx, const double start_vel, const double acc_lim, const double tau, + const size_t start_seg_idx, const double start_vel, const double acc_lim, const double tau, MPCTrajectory & traj); /** diff --git a/control/mpc_lateral_controller/src/mpc.cpp b/control/mpc_lateral_controller/src/mpc.cpp index 643f8a6f91023..8ff323727f25e 100644 --- a/control/mpc_lateral_controller/src/mpc.cpp +++ b/control/mpc_lateral_controller/src/mpc.cpp @@ 
-173,13 +173,20 @@ Float32MultiArrayStamped MPC::generateDiagData( } void MPC::setReferenceTrajectory( - const Trajectory & trajectory_msg, const TrajectoryFilteringParam & param) + const Trajectory & trajectory_msg, const TrajectoryFilteringParam & param, + const Odometry & current_kinematics) { + const size_t nearest_seg_idx = motion_utils::findFirstNearestSegmentIndexWithSoftConstraints( + trajectory_msg.points, current_kinematics.pose.pose, ego_nearest_dist_threshold, + ego_nearest_yaw_threshold); + const double ego_offset_to_segment = motion_utils::calcLongitudinalOffsetToSegment( + trajectory_msg.points, nearest_seg_idx, current_kinematics.pose.pose.position); + const auto mpc_traj_raw = MPCUtils::convertToMPCTrajectory(trajectory_msg); // resampling - const auto [success_resample, mpc_traj_resampled] = - MPCUtils::resampleMPCTrajectoryByDistance(mpc_traj_raw, param.traj_resample_dist); + const auto [success_resample, mpc_traj_resampled] = MPCUtils::resampleMPCTrajectoryByDistance( + mpc_traj_raw, param.traj_resample_dist, nearest_seg_idx, ego_offset_to_segment); if (!success_resample) { warn_throttle("[setReferenceTrajectory] spline error when resampling by distance"); return; @@ -389,13 +396,13 @@ MPCTrajectory MPC::applyVelocityDynamicsFilter( return input; } - const size_t nearest_idx = motion_utils::findFirstNearestIndexWithSoftConstraints( + const size_t nearest_seg_idx = motion_utils::findFirstNearestSegmentIndexWithSoftConstraints( autoware_traj.points, current_kinematics.pose.pose, ego_nearest_dist_threshold, ego_nearest_yaw_threshold); MPCTrajectory output = input; MPCUtils::dynamicSmoothingVelocity( - nearest_idx, current_kinematics.twist.twist.linear.x, m_param.acceleration_limit, + nearest_seg_idx, current_kinematics.twist.twist.linear.x, m_param.acceleration_limit, m_param.velocity_time_constant, output); auto last_point = output.back(); diff --git a/control/mpc_lateral_controller/src/mpc_lateral_controller.cpp b/control/mpc_lateral_controller/src/mpc_lateral_controller.cpp index af680b5050037..c86cc0ea91577 100644 --- a/control/mpc_lateral_controller/src/mpc_lateral_controller.cpp +++ b/control/mpc_lateral_controller/src/mpc_lateral_controller.cpp @@ -227,7 +227,7 @@ trajectory_follower::LateralOutput MpcLateralController::run( trajectory_follower::InputData const & input_data) { // set input data - setTrajectory(input_data.current_trajectory); + setTrajectory(input_data.current_trajectory, input_data.current_odometry); m_current_kinematic_state = input_data.current_odometry; m_current_steering = input_data.current_steering; @@ -319,7 +319,7 @@ bool MpcLateralController::isSteerConverged(const AckermannLateralCommand & cmd) bool MpcLateralController::isReady(const trajectory_follower::InputData & input_data) { - setTrajectory(input_data.current_trajectory); + setTrajectory(input_data.current_trajectory, input_data.current_odometry); m_current_kinematic_state = input_data.current_odometry; m_current_steering = input_data.current_steering; @@ -339,7 +339,8 @@ bool MpcLateralController::isReady(const trajectory_follower::InputData & input_ return true; } -void MpcLateralController::setTrajectory(const Trajectory & msg) +void MpcLateralController::setTrajectory( + const Trajectory & msg, const Odometry & current_kinematics) { m_current_trajectory = msg; @@ -353,7 +354,7 @@ void MpcLateralController::setTrajectory(const Trajectory & msg) return; } - m_mpc.setReferenceTrajectory(msg, m_trajectory_filtering_param); + m_mpc.setReferenceTrajectory(msg, 
m_trajectory_filtering_param, current_kinematics); // update trajectory buffer to check the trajectory shape change. m_trajectory_buffer.push_back(m_current_trajectory); diff --git a/control/mpc_lateral_controller/src/mpc_utils.cpp b/control/mpc_lateral_controller/src/mpc_utils.cpp index 891a48299c81d..8624fc448104a 100644 --- a/control/mpc_lateral_controller/src/mpc_utils.cpp +++ b/control/mpc_lateral_controller/src/mpc_utils.cpp @@ -27,6 +27,19 @@ namespace autoware::motion::control::mpc_lateral_controller { +namespace +{ +double calcLongitudinalOffset( + const geometry_msgs::msg::Point & p_front, const geometry_msgs::msg::Point & p_back, + const geometry_msgs::msg::Point & p_target) +{ + const Eigen::Vector3d segment_vec{p_back.x - p_front.x, p_back.y - p_front.y, 0}; + const Eigen::Vector3d target_vec{p_target.x - p_front.x, p_target.y - p_front.y, 0}; + + return segment_vec.dot(target_vec) / segment_vec.norm(); +} +} // namespace + namespace MPCUtils { using tier4_autoware_utils::calcDistance2d; @@ -77,7 +90,8 @@ void calcMPCTrajectoryArcLength(const MPCTrajectory & trajectory, std::vector resampleMPCTrajectoryByDistance( - const MPCTrajectory & input, const double resample_interval_dist) + const MPCTrajectory & input, const double resample_interval_dist, const size_t nearest_seg_idx, + const double ego_offset_to_segment) { MPCTrajectory output; @@ -92,7 +106,18 @@ std::pair resampleMPCTrajectoryByDistance( } std::vector output_arclength; - for (double s = 0; s < input_arclength.back(); s += resample_interval_dist) { + // To accurately sample the ego point, resample separately in the forward direction and the + // backward direction from the current position. + for (double s = std::clamp( + input_arclength.at(nearest_seg_idx) + ego_offset_to_segment, 0.0, + input_arclength.back() - 1e-6); + 0 <= s; s -= resample_interval_dist) { + output_arclength.push_back(s); + } + std::reverse(output_arclength.begin(), output_arclength.end()); + for (double s = std::max(input_arclength.at(nearest_seg_idx) + ego_offset_to_segment, 0.0) + + resample_interval_dist; + s < input_arclength.back(); s += resample_interval_dist) { output_arclength.push_back(s); } @@ -274,13 +299,17 @@ bool calcMPCTrajectoryTime(MPCTrajectory & traj) } void dynamicSmoothingVelocity( - const size_t start_idx, const double start_vel, const double acc_lim, const double tau, + const size_t start_seg_idx, const double start_vel, const double acc_lim, const double tau, MPCTrajectory & traj) { double curr_v = start_vel; - traj.vx.at(start_idx) = start_vel; + // set current velocity in both start and end point of the segment + traj.vx.at(start_seg_idx) = start_vel; + if (1 < traj.vx.size()) { + traj.vx.at(start_seg_idx + 1) = start_vel; + } - for (size_t i = start_idx + 1; i < traj.size(); ++i) { + for (size_t i = start_seg_idx + 2; i < traj.size(); ++i) { const double ds = calcDistance2d(traj, i, i - 1); const double dt = ds / std::max(std::fabs(curr_v), std::numeric_limits::epsilon()); const double a = tau / std::max(tau + dt, std::numeric_limits::epsilon()); @@ -320,29 +349,40 @@ bool calcNearestPoseInterp( return true; } - auto calcSquaredDist = [](const Pose & p, const MPCTrajectory & t, const size_t idx) { - const double dx = p.position.x - t.x.at(idx); - const double dy = p.position.y - t.y.at(idx); - return dx * dx + dy * dy; - }; - /* get second nearest index = next to nearest_index */ - const size_t next = static_cast( - std::min(static_cast(*nearest_index) + 1, static_cast(traj_size) - 1)); - const size_t prev = - 
static_cast(std::max(static_cast(*nearest_index) - 1, static_cast(0))); - const double dist_to_next = calcSquaredDist(self_pose, traj, next); - const double dist_to_prev = calcSquaredDist(self_pose, traj, prev); - const size_t second_nearest_index = (dist_to_next < dist_to_prev) ? next : prev; - - const double a_sq = calcSquaredDist(self_pose, traj, *nearest_index); - const double b_sq = calcSquaredDist(self_pose, traj, second_nearest_index); - const double dx3 = traj.x.at(*nearest_index) - traj.x.at(second_nearest_index); - const double dy3 = traj.y.at(*nearest_index) - traj.y.at(second_nearest_index); - const double c_sq = dx3 * dx3 + dy3 * dy3; + const auto [prev, next] = [&]() -> std::pair { + if (*nearest_index == 0) { + return std::make_pair(0, 1); + } + if (*nearest_index == traj_size - 1) { + return std::make_pair(traj_size - 2, traj_size - 1); + } + geometry_msgs::msg::Point nearest_traj_point; + nearest_traj_point.x = traj.x.at(*nearest_index); + nearest_traj_point.y = traj.y.at(*nearest_index); + geometry_msgs::msg::Point next_nearest_traj_point; + next_nearest_traj_point.x = traj.x.at(*nearest_index + 1); + next_nearest_traj_point.y = traj.y.at(*nearest_index + 1); + + const double signed_length = + calcLongitudinalOffset(nearest_traj_point, next_nearest_traj_point, self_pose.position); + if (signed_length <= 0) { + return std::make_pair(*nearest_index - 1, *nearest_index); + } + return std::make_pair(*nearest_index, *nearest_index + 1); + }(); + + geometry_msgs::msg::Point next_traj_point; + next_traj_point.x = traj.x.at(next); + next_traj_point.y = traj.y.at(next); + geometry_msgs::msg::Point prev_traj_point; + prev_traj_point.x = traj.x.at(prev); + prev_traj_point.y = traj.y.at(prev); + const double traj_seg_length = + tier4_autoware_utils::calcDistance2d(prev_traj_point, next_traj_point); /* if distance between two points are too close */ - if (c_sq < 1.0E-5) { + if (traj_seg_length < 1.0E-5) { nearest_pose->position.x = traj.x.at(*nearest_index); nearest_pose->position.y = traj.y.at(*nearest_index); nearest_pose->orientation = createQuaternionFromYaw(traj.yaw.at(*nearest_index)); @@ -351,18 +391,15 @@ bool calcNearestPoseInterp( } /* linear interpolation */ - const double alpha = std::max(std::min(0.5 * (c_sq - a_sq + b_sq) / c_sq, 1.0), 0.0); - nearest_pose->position.x = - alpha * traj.x.at(*nearest_index) + (1 - alpha) * traj.x.at(second_nearest_index); - nearest_pose->position.y = - alpha * traj.y.at(*nearest_index) + (1 - alpha) * traj.y.at(second_nearest_index); - const double tmp_yaw_err = - normalizeRadian(traj.yaw.at(*nearest_index) - traj.yaw.at(second_nearest_index)); - const double nearest_yaw = - normalizeRadian(traj.yaw.at(second_nearest_index) + alpha * tmp_yaw_err); + const double ratio = std::clamp( + calcLongitudinalOffset(prev_traj_point, next_traj_point, self_pose.position) / traj_seg_length, + 0.0, 1.0); + nearest_pose->position.x = (1 - ratio) * traj.x.at(prev) + ratio * traj.x.at(next); + nearest_pose->position.y = (1 - ratio) * traj.y.at(prev) + ratio * traj.y.at(next); + const double tmp_yaw_err = normalizeRadian(traj.yaw.at(prev) - traj.yaw.at(next)); + const double nearest_yaw = normalizeRadian(traj.yaw.at(next) + (1 - ratio) * tmp_yaw_err); nearest_pose->orientation = createQuaternionFromYaw(nearest_yaw); - *nearest_time = alpha * traj.relative_time.at(*nearest_index) + - (1 - alpha) * traj.relative_time.at(second_nearest_index); + *nearest_time = (1 - ratio) * traj.relative_time.at(prev) + ratio * traj.relative_time.at(next); return true; } diff 
--git a/control/mpc_lateral_controller/test/test_mpc.cpp b/control/mpc_lateral_controller/test/test_mpc.cpp index 6f8a6fb598058..dade035daf26c 100644 --- a/control/mpc_lateral_controller/test/test_mpc.cpp +++ b/control/mpc_lateral_controller/test/test_mpc.cpp @@ -163,7 +163,9 @@ class MPCTest : public ::testing::Test mpc.initializeSteeringPredictor(); // Init trajectory - mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param); + const auto current_kinematics = + makeOdometry(dummy_straight_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param, current_kinematics); } nav_msgs::msg::Odometry makeOdometry(const geometry_msgs::msg::Pose & pose, const double velocity) @@ -221,7 +223,9 @@ TEST_F(MPCTest, InitializeAndCalculateRightTurn) // Init parameters and reference trajectory initializeMPC(mpc); - mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param); + const auto current_kinematics = + makeOdometry(dummy_right_turn_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param, current_kinematics); // Calculate MPC AckermannLateralCommand ctrl_cmd; @@ -237,7 +241,8 @@ TEST_F(MPCTest, OsqpCalculate) { MPC mpc; initializeMPC(mpc); - mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param); + const auto current_kinematics = makeOdometry(dummy_straight_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param, current_kinematics); std::shared_ptr vehicle_model_ptr = std::make_shared(wheelbase, steer_limit, steer_tau); @@ -262,7 +267,9 @@ TEST_F(MPCTest, OsqpCalculateRightTurn) { MPC mpc; initializeMPC(mpc); - mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param); + const auto current_kinematics = + makeOdometry(dummy_right_turn_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param, current_kinematics); std::shared_ptr vehicle_model_ptr = std::make_shared(wheelbase, steer_limit, steer_tau); @@ -300,7 +307,8 @@ TEST_F(MPCTest, KinematicsNoDelayCalculate) // Init filters mpc.initializeLowPassFilters(steering_lpf_cutoff_hz, error_deriv_lpf_cutoff_hz); // Init trajectory - mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param); + const auto current_kinematics = makeOdometry(dummy_straight_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_straight_trajectory, trajectory_param, current_kinematics); // Calculate MPC AckermannLateralCommand ctrl_cmd; Trajectory pred_traj; @@ -315,7 +323,9 @@ TEST_F(MPCTest, KinematicsNoDelayCalculateRightTurn) { MPC mpc; initializeMPC(mpc); - mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param); + const auto current_kinematics = + makeOdometry(dummy_right_turn_trajectory.points.front().pose, 0.0); + mpc.setReferenceTrajectory(dummy_right_turn_trajectory, trajectory_param, current_kinematics); std::shared_ptr vehicle_model_ptr = std::make_shared(wheelbase, steer_limit); diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml index 7c18ad4bf9cd0..178e193dc2d99 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml +++ 
b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml @@ -11,13 +11,12 @@ - - + @@ -59,15 +58,16 @@ --> - + - + + @@ -75,13 +75,54 @@ - - + + - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -96,6 +137,8 @@ + + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml index a8b37808a75ad..a2f91ff765005 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml @@ -11,8 +11,6 @@ - - @@ -81,15 +79,16 @@ - + - + + @@ -97,11 +96,11 @@ - - + + - - + + @@ -189,12 +188,54 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml index a71eefc9b84b3..33d63d5e07b8e 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml @@ -6,7 +6,6 @@ - @@ -37,8 +36,6 @@ - - @@ -99,6 +96,8 @@ + + @@ -125,6 +124,7 @@ + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml index 01943a49d3d35..23e0297dc5e44 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml @@ -4,8 +4,6 @@ - - @@ -19,29 +17,27 @@ - + - + + - - - + - - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml index 4eee611c33992..cee6829ea3b4a 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml @@ -3,7 +3,6 @@ - @@ -20,6 +19,7 @@ + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py index a5d8d2113d2db..93d395ca3e466 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py @@ -35,7 +35,6 @@ def __init__(self, context): ) with open(pointcloud_map_filter_param_path, "r") as f: self.pointcloud_map_filter_param = yaml.safe_load(f)["/**"]["ros__parameters"] - self.use_down_sample_filter = self.pointcloud_map_filter_param["use_down_sample_filter"] self.voxel_size = self.pointcloud_map_filter_param["down_sample_voxel_size"] self.distance_threshold = self.pointcloud_map_filter_param["distance_threshold"] 
self.downsize_ratio_z_axis = self.pointcloud_map_filter_param["downsize_ratio_z_axis"] @@ -46,47 +45,40 @@ def __init__(self, context): ] self.map_loader_radius = self.pointcloud_map_filter_param["map_loader_radius"] self.publish_debug_pcd = self.pointcloud_map_filter_param["publish_debug_pcd"] + self.use_pointcloud_map = LaunchConfiguration("use_pointcloud_map").perform(context) def create_pipeline(self): - if self.use_down_sample_filter: - return self.create_down_sample_pipeline() + if self.use_pointcloud_map == "true": + return self.create_compare_map_pipeline() else: - return self.create_normal_pipeline() + return self.create_no_compare_map_pipeline() - def create_normal_pipeline(self): + def create_no_compare_map_pipeline(self): components = [] components.append( ComposableNode( - package="compare_map_segmentation", - plugin="compare_map_segmentation::VoxelBasedCompareMapFilterComponent", - name="voxel_based_compare_map_filter", + package="pointcloud_preprocessor", + plugin="pointcloud_preprocessor::ApproximateDownsampleFilterComponent", + name="voxel_grid_downsample_filter", remappings=[ ("input", LaunchConfiguration("input_topic")), - ("map", "/map/pointcloud_map"), ("output", LaunchConfiguration("output_topic")), - ("map_loader_service", "/map/get_differential_pointcloud_map"), - ("kinematic_state", "/localization/kinematic_state"), ], parameters=[ { - "distance_threshold": self.distance_threshold, - "downsize_ratio_z_axis": self.downsize_ratio_z_axis, - "timer_interval_ms": self.timer_interval_ms, - "use_dynamic_map_loading": self.use_dynamic_map_loading, - "map_update_distance_threshold": self.map_update_distance_threshold, - "map_loader_radius": self.map_loader_radius, - "publish_debug_pcd": self.publish_debug_pcd, - "input_frame": "map", + "voxel_size_x": self.voxel_size, + "voxel_size_y": self.voxel_size, + "voxel_size_z": self.voxel_size, } ], extra_arguments=[ - {"use_intra_process_comms": False}, + {"use_intra_process_comms": LaunchConfiguration("use_intra_process")} ], - ) + ), ) return components - def create_down_sample_pipeline(self): + def create_compare_map_pipeline(self): components = [] down_sample_topic = ( "/perception/obstacle_segmentation/pointcloud_map_filtered/downsampled/pointcloud" @@ -94,7 +86,7 @@ def create_down_sample_pipeline(self): components.append( ComposableNode( package="pointcloud_preprocessor", - plugin="pointcloud_preprocessor::VoxelGridDownsampleFilterComponent", + plugin="pointcloud_preprocessor::ApproximateDownsampleFilterComponent", name="voxel_grid_downsample_filter", remappings=[ ("input", LaunchConfiguration("input_topic")), @@ -177,6 +169,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("use_intra_process", "True") add_launch_arg("use_pointcloud_container", "False") add_launch_arg("container_name", "pointcloud_map_filter_pipeline_container") + add_launch_arg("use_pointcloud_map", "true") set_container_executable = SetLaunchConfiguration( "container_executable", "component_container", diff --git a/launch/tier4_perception_launch/launch/perception.launch.xml b/launch/tier4_perception_launch/launch/perception.launch.xml index ec3fce594540a..36d43bab74894 100644 --- a/launch/tier4_perception_launch/launch/perception.launch.xml +++ b/launch/tier4_perception_launch/launch/perception.launch.xml @@ -1,10 +1,8 @@ - - @@ -60,8 +58,9 @@ - + + - - - + + diff --git a/launch/tier4_perception_launch/package.xml b/launch/tier4_perception_launch/package.xml index c1723c1fa07e8..336588891d9b2 100644 --- 
a/launch/tier4_perception_launch/package.xml +++ b/launch/tier4_perception_launch/package.xml @@ -12,6 +12,7 @@ ament_cmake_auto autoware_cmake + cluster_merger compare_map_segmentation crosswalk_traffic_light_estimator detected_object_feature_remover diff --git a/localization/ekf_localizer/CMakeLists.txt b/localization/ekf_localizer/CMakeLists.txt index 6e7e194f7cf72..8e29bfed14a28 100644 --- a/localization/ekf_localizer/CMakeLists.txt +++ b/localization/ekf_localizer/CMakeLists.txt @@ -16,6 +16,7 @@ ament_auto_find_build_dependencies() ament_auto_add_library(ekf_localizer_lib SHARED src/ekf_localizer.cpp src/covariance.cpp + src/diagnostics.cpp src/mahalanobis.cpp src/measurement.cpp src/state_transition.cpp diff --git a/localization/ekf_localizer/README.md b/localization/ekf_localizer/README.md index 748b5ee5becc0..6492f20331a66 100644 --- a/localization/ekf_localizer/README.md +++ b/localization/ekf_localizer/README.md @@ -88,6 +88,10 @@ The parameters and input topic names can be set in the `ekf_localizer.launch` fi The estimated twist with covariance. +- diagnostics (diagnostic_msgs/DiagnosticArray) + + The diagnostic information. + ### Published TF - base_link @@ -148,6 +152,21 @@ The parameters are set in `launch/ekf_localizer.launch` . note: process noise for positions x & y are calculated automatically from nonlinear dynamics. +### For diagnostics + +| Name | Type | Description | Default value | +| :------------------------------------ | :----- | :----------------------------------------------------------------------------------------------------------------------------------------- | :------------ | +| pose_no_update_count_threshold_warn | size_t | The threshold at which a WARN state is triggered due to the Pose Topic update not happening continuously for a certain number of times. | 50 | +| pose_no_update_count_threshold_error | size_t | The threshold at which an ERROR state is triggered due to the Pose Topic update not happening continuously for a certain number of times. | 250 | +| twist_no_update_count_threshold_warn | size_t | The threshold at which a WARN state is triggered due to the Twist Topic update not happening continuously for a certain number of times. | 50 | +| twist_no_update_count_threshold_error | size_t | The threshold at which an ERROR state is triggered due to the Twist Topic update not happening continuously for a certain number of times. | 250 | + +### Misc + +| Name | Type | Description | Default value | +| :-------------------------------- | :----- | :------------------------------------------------------------------------------------------------- | :------------- | +| threshold_observable_velocity_mps | double | Minimum value for velocity that will be used for EKF. Mainly used for dead zone in velocity sensor | 0.0 (disabled) | + ## How to tune EKF parameters ### 0. Preliminaries @@ -194,6 +213,23 @@ Note that, although the dimension gets larger since the analytical expansion can

+## Diagnostics
+
+<img src="./media/ekf_diagnostics.png" alt="ekf_diagnostics">
+ +### The conditions that result in a WARN state + +- The node is not in the activate state. +- The number of consecutive no measurement update via the Pose/Twist topic exceeds the `pose_no_update_count_threshold_warn`/`twist_no_update_count_threshold_warn`. +- The timestamp of the Pose/Twist topic is beyond the delay compensation range. +- The Pose/Twist topic is beyond the range of Mahalanobis distance for covariance estimation. + +### The conditions that result in an ERROR state + +- The number of consecutive no measurement update via the Pose/Twist topic exceeds the `pose_no_update_count_threshold_error`/`twist_no_update_count_threshold_error`. + ## Known issues - In the presence of multiple inputs with yaw estimation, yaw bias `b_k` in the current EKF state would not make any sense, since it is intended to capture the extrinsic parameter's calibration error of a sensor. Thus, future work includes introducing yaw bias for each sensor with yaw estimation. diff --git a/localization/ekf_localizer/config/ekf_localizer.param.yaml b/localization/ekf_localizer/config/ekf_localizer.param.yaml index 4d3f5b9643462..667217d2591dc 100644 --- a/localization/ekf_localizer/config/ekf_localizer.param.yaml +++ b/localization/ekf_localizer/config/ekf_localizer.param.yaml @@ -21,3 +21,12 @@ proc_stddev_yaw_c: 0.005 proc_stddev_vx_c: 10.0 proc_stddev_wz_c: 5.0 + + # for diagnostics + pose_no_update_count_threshold_warn: 50 + pose_no_update_count_threshold_error: 250 + twist_no_update_count_threshold_warn: 50 + twist_no_update_count_threshold_error: 250 + + # for velocity measurement limitation (Set 0.0 if you want to ignore) + threshold_observable_velocity_mps: 0.0 # [m/s] diff --git a/localization/ekf_localizer/include/ekf_localizer/diagnostics.hpp b/localization/ekf_localizer/include/ekf_localizer/diagnostics.hpp new file mode 100644 index 0000000000000..f4dc6436f6a40 --- /dev/null +++ b/localization/ekf_localizer/include/ekf_localizer/diagnostics.hpp @@ -0,0 +1,40 @@ +// Copyright 2023 Autoware Foundation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef EKF_LOCALIZER__DIAGNOSTICS_HPP_ +#define EKF_LOCALIZER__DIAGNOSTICS_HPP_ + +#include + +#include +#include + +diagnostic_msgs::msg::DiagnosticStatus checkProcessActivated(const bool is_activated); + +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementUpdated( + const std::string & measurement_type, const size_t no_update_count, + const size_t no_update_count_threshold_warn, const size_t no_update_count_threshold_error); +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementQueueSize( + const std::string & measurement_type, const size_t queue_size); +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementDelayGate( + const std::string & measurement_type, const bool is_passed_delay_gate, const double delay_time, + const double delay_time_threshold); +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementMahalanobisGate( + const std::string & measurement_type, const bool is_passed_mahalanobis_gate, + const double mahalanobis_distance, const double mahalanobis_distance_threshold); + +diagnostic_msgs::msg::DiagnosticStatus mergeDiagnosticStatus( + const std::vector & stat_array); + +#endif // EKF_LOCALIZER__DIAGNOSTICS_HPP_ diff --git a/localization/ekf_localizer/include/ekf_localizer/ekf_localizer.hpp b/localization/ekf_localizer/include/ekf_localizer/ekf_localizer.hpp index a4ae47b670897..4fc2305cc7adc 100644 --- a/localization/ekf_localizer/include/ekf_localizer/ekf_localizer.hpp +++ b/localization/ekf_localizer/include/ekf_localizer/ekf_localizer.hpp @@ -25,6 +25,7 @@ #include #include +#include #include #include #include @@ -126,6 +127,8 @@ class EKFLocalizer : public rclcpp::Node rclcpp::Publisher::SharedPtr pub_biased_pose_; //!< @brief ekf estimated yaw bias publisher rclcpp::Publisher::SharedPtr pub_biased_pose_cov_; + //!< @brief diagnostics publisher + rclcpp::Publisher::SharedPtr pub_diag_; //!< @brief initial pose subscriber rclcpp::Subscription::SharedPtr sub_initialpose_; //!< @brief measurement pose with covariance subscriber @@ -144,6 +147,7 @@ class EKFLocalizer : public rclcpp::Node rclcpp::TimerBase::SharedPtr timer_tf_; //!< @brief tf broadcaster std::shared_ptr tf_br_; + //!< @brief extended kalman filter instance. 
TimeDelayKalmanFilter ekf_; Simple1DFilter z_filter_; @@ -167,6 +171,22 @@ class EKFLocalizer : public rclcpp::Node bool is_activated_; + size_t pose_no_update_count_; + size_t pose_queue_size_; + bool pose_is_passed_delay_gate_; + double pose_delay_time_; + double pose_delay_time_threshold_; + bool pose_is_passed_mahalanobis_gate_; + double pose_mahalanobis_distance_; + + size_t twist_no_update_count_; + size_t twist_queue_size_; + bool twist_is_passed_delay_gate_; + double twist_delay_time_; + double twist_delay_time_threshold_; + bool twist_is_passed_mahalanobis_gate_; + double twist_mahalanobis_distance_; + AgedObjectQueue pose_queue_; AgedObjectQueue twist_queue_; @@ -221,13 +241,13 @@ class EKFLocalizer : public rclcpp::Node * @brief compute EKF update with pose measurement * @param pose measurement value */ - void measurementUpdatePose(const geometry_msgs::msg::PoseWithCovarianceStamped & pose); + bool measurementUpdatePose(const geometry_msgs::msg::PoseWithCovarianceStamped & pose); /** * @brief compute EKF update with pose measurement * @param twist measurement value */ - void measurementUpdateTwist(const geometry_msgs::msg::TwistWithCovarianceStamped & twist); + bool measurementUpdateTwist(const geometry_msgs::msg::TwistWithCovarianceStamped & twist); /** * @brief get transform from frame_id @@ -246,6 +266,11 @@ class EKFLocalizer : public rclcpp::Node */ void publishEstimateResult(); + /** + * @brief publish diagnostics message + */ + void publishDiagnostics(); + /** * @brief for debug */ diff --git a/localization/ekf_localizer/include/ekf_localizer/hyper_parameters.hpp b/localization/ekf_localizer/include/ekf_localizer/hyper_parameters.hpp index c74fa9be79525..01ef658cf445d 100644 --- a/localization/ekf_localizer/include/ekf_localizer/hyper_parameters.hpp +++ b/localization/ekf_localizer/include/ekf_localizer/hyper_parameters.hpp @@ -39,7 +39,17 @@ class HyperParameters twist_smoothing_steps(node->declare_parameter("twist_smoothing_steps", 2)), proc_stddev_vx_c(node->declare_parameter("proc_stddev_vx_c", 5.0)), proc_stddev_wz_c(node->declare_parameter("proc_stddev_wz_c", 1.0)), - proc_stddev_yaw_c(node->declare_parameter("proc_stddev_yaw_c", 0.005)) + proc_stddev_yaw_c(node->declare_parameter("proc_stddev_yaw_c", 0.005)), + pose_no_update_count_threshold_warn( + node->declare_parameter("pose_no_update_count_threshold_warn", 50)), + pose_no_update_count_threshold_error( + node->declare_parameter("pose_no_update_count_threshold_error", 250)), + twist_no_update_count_threshold_warn( + node->declare_parameter("twist_no_update_count_threshold_warn", 50)), + twist_no_update_count_threshold_error( + node->declare_parameter("twist_no_update_count_threshold_error", 250)), + threshold_observable_velocity_mps( + node->declare_parameter("threshold_observable_velocity_mps", 0.5)) { } @@ -59,6 +69,11 @@ class HyperParameters const double proc_stddev_vx_c; //!< @brief vx process noise const double proc_stddev_wz_c; //!< @brief wz process noise const double proc_stddev_yaw_c; //!< @brief yaw process noise + const size_t pose_no_update_count_threshold_warn; + const size_t pose_no_update_count_threshold_error; + const size_t twist_no_update_count_threshold_warn; + const size_t twist_no_update_count_threshold_error; + const double threshold_observable_velocity_mps; }; #endif // EKF_LOCALIZER__HYPER_PARAMETERS_HPP_ diff --git a/localization/ekf_localizer/media/ekf_diagnostics.png b/localization/ekf_localizer/media/ekf_diagnostics.png new file mode 100644 index 0000000000000..2580d6d973290 Binary 
files /dev/null and b/localization/ekf_localizer/media/ekf_diagnostics.png differ diff --git a/localization/ekf_localizer/package.xml b/localization/ekf_localizer/package.xml index 5bc9c5e42712d..005bddd3eb22b 100644 --- a/localization/ekf_localizer/package.xml +++ b/localization/ekf_localizer/package.xml @@ -18,6 +18,7 @@ eigen + diagnostic_msgs fmt geometry_msgs kalman_filter diff --git a/localization/ekf_localizer/src/diagnostics.cpp b/localization/ekf_localizer/src/diagnostics.cpp new file mode 100644 index 0000000000000..9ff36561abaa9 --- /dev/null +++ b/localization/ekf_localizer/src/diagnostics.cpp @@ -0,0 +1,169 @@ +// Copyright 2023 Autoware Foundation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ekf_localizer/diagnostics.hpp" + +#include + +#include +#include + +diagnostic_msgs::msg::DiagnosticStatus checkProcessActivated(const bool is_activated) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + diagnostic_msgs::msg::KeyValue key_value; + key_value.key = "is_activated"; + key_value.value = is_activated ? "True" : "False"; + stat.values.push_back(key_value); + + stat.level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat.message = "OK"; + if (!is_activated) { + stat.level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat.message = "[WARN]process is not activated"; + } + + return stat; +} + +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementUpdated( + const std::string & measurement_type, const size_t no_update_count, + const size_t no_update_count_threshold_warn, const size_t no_update_count_threshold_error) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + diagnostic_msgs::msg::KeyValue key_value; + key_value.key = measurement_type + "_no_update_count"; + key_value.value = std::to_string(no_update_count); + stat.values.push_back(key_value); + key_value.key = measurement_type + "_no_update_count_threshold_warn"; + key_value.value = std::to_string(no_update_count_threshold_warn); + stat.values.push_back(key_value); + key_value.key = measurement_type + "_no_update_count_threshold_error"; + key_value.value = std::to_string(no_update_count_threshold_error); + stat.values.push_back(key_value); + + stat.level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat.message = "OK"; + if (no_update_count >= no_update_count_threshold_warn) { + stat.level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat.message = "[WARN]" + measurement_type + " is not updated"; + } + if (no_update_count >= no_update_count_threshold_error) { + stat.level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + stat.message = "[ERROR]" + measurement_type + " is not updated"; + } + + return stat; +} + +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementQueueSize( + const std::string & measurement_type, const size_t queue_size) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + diagnostic_msgs::msg::KeyValue key_value; + key_value.key = measurement_type + "_queue_size"; + key_value.value = std::to_string(queue_size); + stat.values.push_back(key_value); + + 
stat.level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat.message = "OK"; + + return stat; +} + +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementDelayGate( + const std::string & measurement_type, const bool is_passed_delay_gate, const double delay_time, + const double delay_time_threshold) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + diagnostic_msgs::msg::KeyValue key_value; + key_value.key = measurement_type + "_is_passed_delay_gate"; + key_value.value = is_passed_delay_gate ? "True" : "False"; + stat.values.push_back(key_value); + key_value.key = measurement_type + "_delay_time"; + key_value.value = std::to_string(delay_time); + stat.values.push_back(key_value); + key_value.key = measurement_type + "_delay_time_threshold"; + key_value.value = std::to_string(delay_time_threshold); + stat.values.push_back(key_value); + + stat.level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat.message = "OK"; + if (!is_passed_delay_gate) { + stat.level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat.message = "[WARN]" + measurement_type + " topic is delay"; + } + + return stat; +} + +diagnostic_msgs::msg::DiagnosticStatus checkMeasurementMahalanobisGate( + const std::string & measurement_type, const bool is_passed_mahalanobis_gate, + const double mahalanobis_distance, const double mahalanobis_distance_threshold) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + diagnostic_msgs::msg::KeyValue key_value; + key_value.key = measurement_type + "_is_passed_mahalanobis_gate"; + key_value.value = is_passed_mahalanobis_gate ? "True" : "False"; + stat.values.push_back(key_value); + key_value.key = measurement_type + "_mahalanobis_distance"; + key_value.value = std::to_string(mahalanobis_distance); + stat.values.push_back(key_value); + key_value.key = measurement_type + "_mahalanobis_distance_threshold"; + key_value.value = std::to_string(mahalanobis_distance_threshold); + stat.values.push_back(key_value); + + stat.level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat.message = "OK"; + if (!is_passed_mahalanobis_gate) { + stat.level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat.message = "[WARN]mahalanobis distance of " + measurement_type + " topic is large"; + } + + return stat; +} + +// The highest level within the stat_array will be reflected in the merged_stat. 
+// When all stat_array entries are 'OK,' the message of merged_stat will be "OK" +diagnostic_msgs::msg::DiagnosticStatus mergeDiagnosticStatus( + const std::vector & stat_array) +{ + diagnostic_msgs::msg::DiagnosticStatus merged_stat; + + for (const auto & stat : stat_array) { + if ((stat.level > diagnostic_msgs::msg::DiagnosticStatus::OK)) { + if (!merged_stat.message.empty()) { + merged_stat.message += "; "; + } + merged_stat.message += stat.message; + } + if (stat.level > merged_stat.level) { + merged_stat.level = stat.level; + } + for (const auto & value : stat.values) { + merged_stat.values.push_back(value); + } + } + + if (merged_stat.level == diagnostic_msgs::msg::DiagnosticStatus::OK) { + merged_stat.message = "OK"; + } + + return merged_stat; +} diff --git a/localization/ekf_localizer/src/ekf_localizer.cpp b/localization/ekf_localizer/src/ekf_localizer.cpp index 0e46a26add852..b39112b1d8d62 100644 --- a/localization/ekf_localizer/src/ekf_localizer.cpp +++ b/localization/ekf_localizer/src/ekf_localizer.cpp @@ -15,6 +15,7 @@ #include "ekf_localizer/ekf_localizer.hpp" #include "ekf_localizer/covariance.hpp" +#include "ekf_localizer/diagnostics.hpp" #include "ekf_localizer/mahalanobis.hpp" #include "ekf_localizer/matrix_types.hpp" #include "ekf_localizer/measurement.hpp" @@ -87,6 +88,7 @@ EKFLocalizer::EKFLocalizer(const std::string & node_name, const rclcpp::NodeOpti pub_biased_pose_ = create_publisher("ekf_biased_pose", 1); pub_biased_pose_cov_ = create_publisher( "ekf_biased_pose_with_covariance", 1); + pub_diag_ = this->create_publisher("/diagnostics", 10); sub_initialpose_ = create_subscription( "initialpose", 1, std::bind(&EKFLocalizer::callbackInitialPose, this, _1)); sub_pose_with_cov_ = create_subscription( @@ -143,6 +145,7 @@ void EKFLocalizer::timerCallback() if (!is_activated_) { warning_.warnThrottle( "The node is not activated. Provide initial pose to pose_initializer", 2000); + publishDiagnostics(); return; } @@ -176,6 +179,16 @@ void EKFLocalizer::timerCallback() DEBUG_INFO(get_logger(), "------------------------- end prediction -------------------------\n"); /* pose measurement update */ + + pose_queue_size_ = pose_queue_.size(); + pose_is_passed_delay_gate_ = true; + pose_delay_time_ = 0.0; + pose_delay_time_threshold_ = 0.0; + pose_is_passed_mahalanobis_gate_ = true; + pose_mahalanobis_distance_ = 0.0; + + bool pose_is_updated = false; + if (!pose_queue_.empty()) { DEBUG_INFO(get_logger(), "------------------------- start Pose -------------------------"); stop_watch_.tic(); @@ -184,13 +197,27 @@ void EKFLocalizer::timerCallback() const size_t n = pose_queue_.size(); for (size_t i = 0; i < n; ++i) { const auto pose = pose_queue_.pop_increment_age(); - measurementUpdatePose(*pose); + bool is_updated = measurementUpdatePose(*pose); + if (is_updated) { + pose_is_updated = true; + } } DEBUG_INFO(get_logger(), "[EKF] measurementUpdatePose calc time = %f [ms]", stop_watch_.toc()); DEBUG_INFO(get_logger(), "------------------------- end Pose -------------------------\n"); } + pose_no_update_count_ = pose_is_updated ? 
0 : (pose_no_update_count_ + 1); /* twist measurement update */ + + twist_queue_size_ = twist_queue_.size(); + twist_is_passed_delay_gate_ = true; + twist_delay_time_ = 0.0; + twist_delay_time_threshold_ = 0.0; + twist_is_passed_mahalanobis_gate_ = true; + twist_mahalanobis_distance_ = 0.0; + + bool twist_is_updated = false; + if (!twist_queue_.empty()) { DEBUG_INFO(get_logger(), "------------------------- start Twist -------------------------"); stop_watch_.tic(); @@ -199,11 +226,15 @@ void EKFLocalizer::timerCallback() const size_t n = twist_queue_.size(); for (size_t i = 0; i < n; ++i) { const auto twist = twist_queue_.pop_increment_age(); - measurementUpdateTwist(*twist); + bool is_updated = measurementUpdateTwist(*twist); + if (is_updated) { + twist_is_updated = true; + } } DEBUG_INFO(get_logger(), "[EKF] measurementUpdateTwist calc time = %f [ms]", stop_watch_.toc()); DEBUG_INFO(get_logger(), "------------------------- end Twist -------------------------\n"); } + twist_no_update_count_ = twist_is_updated ? 0 : (twist_no_update_count_ + 1); const double x = ekf_.getXelement(IDX::X); const double y = ekf_.getXelement(IDX::Y); @@ -235,6 +266,7 @@ void EKFLocalizer::timerCallback() /* publish ekf result */ publishEstimateResult(); + publishDiagnostics(); } void EKFLocalizer::showCurrentX() @@ -353,6 +385,11 @@ void EKFLocalizer::callbackPoseWithCovariance( void EKFLocalizer::callbackTwistWithCovariance( geometry_msgs::msg::TwistWithCovarianceStamped::SharedPtr msg) { + // Ignore twist if velocity is too small. + // Note that this inequality must not include "equal". + if (std::abs(msg->twist.twist.linear.x) < params_.threshold_observable_velocity_mps) { + msg->twist.covariance[0 * 6 + 0] = 10000.0; + } twist_queue_.push(msg); } @@ -376,7 +413,7 @@ void EKFLocalizer::initEKF() /* * measurementUpdatePose */ -void EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovarianceStamped & pose) +bool EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovarianceStamped & pose) { if (pose.header.frame_id != params_.pose_frame_id) { warning_.warnThrottle( @@ -400,10 +437,14 @@ void EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovar delay_time = std::max(delay_time, 0.0); int delay_step = std::roundf(delay_time / ekf_dt_); + + pose_delay_time_ = std::max(delay_time, pose_delay_time_); + pose_delay_time_threshold_ = params_.extend_state_step * ekf_dt_; if (delay_step >= params_.extend_state_step) { + pose_is_passed_delay_gate_ = false; warning_.warnThrottle( poseDelayStepWarningMessage(delay_time, params_.extend_state_step, ekf_dt_), 2000); - return; + return false; } DEBUG_INFO(get_logger(), "delay_time: %f [s]", delay_time); @@ -420,7 +461,7 @@ void EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovar if (hasNan(y) || hasInf(y)) { warning_.warn( "[EKF] pose measurement matrix includes NaN of Inf. ignore update. 
check pose message."); - return; + return false; } /* Gate */ @@ -431,10 +472,12 @@ void EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovar const Eigen::MatrixXd P_y = P_curr.block(0, 0, dim_y, dim_y); const double distance = mahalanobis(y_ekf, y, P_y); + pose_mahalanobis_distance_ = std::max(distance, pose_mahalanobis_distance_); if (distance > params_.pose_gate_dist) { + pose_is_passed_mahalanobis_gate_ = false; warning_.warnThrottle(mahalanobisWarningMessage(distance, params_.pose_gate_dist), 2000); warning_.warnThrottle("Ignore the measurement data.", 2000); - return; + return false; } DEBUG_PRINT_MAT(y.transpose()); @@ -460,12 +503,14 @@ void EKFLocalizer::measurementUpdatePose(const geometry_msgs::msg::PoseWithCovar const Eigen::MatrixXd X_result = ekf_.getLatestX(); DEBUG_PRINT_MAT(X_result.transpose()); DEBUG_PRINT_MAT((X_result - X_curr).transpose()); + + return true; } /* * measurementUpdateTwist */ -void EKFLocalizer::measurementUpdateTwist( +bool EKFLocalizer::measurementUpdateTwist( const geometry_msgs::msg::TwistWithCovarianceStamped & twist) { if (twist.header.frame_id != "base_link") { @@ -488,10 +533,14 @@ void EKFLocalizer::measurementUpdateTwist( delay_time = std::max(delay_time, 0.0); int delay_step = std::roundf(delay_time / ekf_dt_); + + twist_delay_time_ = std::max(delay_time, twist_delay_time_); + twist_delay_time_threshold_ = params_.extend_state_step * ekf_dt_; if (delay_step >= params_.extend_state_step) { + twist_is_passed_delay_gate_ = false; warning_.warnThrottle( twistDelayStepWarningMessage(delay_time, params_.extend_state_step, ekf_dt_), 2000); - return; + return false; } DEBUG_INFO(get_logger(), "delay_time: %f [s]", delay_time); @@ -502,7 +551,7 @@ void EKFLocalizer::measurementUpdateTwist( if (hasNan(y) || hasInf(y)) { warning_.warn( "[EKF] twist measurement matrix includes NaN of Inf. ignore update. 
check twist message."); - return; + return false; } const Eigen::Vector2d y_ekf( @@ -512,10 +561,12 @@ void EKFLocalizer::measurementUpdateTwist( const Eigen::MatrixXd P_y = P_curr.block(4, 4, dim_y, dim_y); const double distance = mahalanobis(y_ekf, y, P_y); + twist_mahalanobis_distance_ = std::max(distance, twist_mahalanobis_distance_); if (distance > params_.twist_gate_dist) { + twist_is_passed_mahalanobis_gate_ = false; warning_.warnThrottle(mahalanobisWarningMessage(distance, params_.twist_gate_dist), 2000); warning_.warnThrottle("Ignore the measurement data.", 2000); - return; + return false; } DEBUG_PRINT_MAT(y.transpose()); @@ -532,6 +583,8 @@ void EKFLocalizer::measurementUpdateTwist( const Eigen::MatrixXd X_result = ekf_.getLatestX(); DEBUG_PRINT_MAT(X_result.transpose()); DEBUG_PRINT_MAT((X_result - X_curr).transpose()); + + return true; } /* @@ -607,6 +660,45 @@ void EKFLocalizer::publishEstimateResult() pub_debug_->publish(msg); } +void EKFLocalizer::publishDiagnostics() +{ + std::vector diag_status_array; + + diag_status_array.push_back(checkProcessActivated(is_activated_)); + + if (is_activated_) { + diag_status_array.push_back(checkMeasurementUpdated( + "pose", pose_no_update_count_, params_.pose_no_update_count_threshold_warn, + params_.pose_no_update_count_threshold_error)); + diag_status_array.push_back(checkMeasurementQueueSize("pose", pose_queue_size_)); + diag_status_array.push_back(checkMeasurementDelayGate( + "pose", pose_is_passed_delay_gate_, pose_delay_time_, pose_delay_time_threshold_)); + diag_status_array.push_back(checkMeasurementMahalanobisGate( + "pose", pose_is_passed_mahalanobis_gate_, pose_mahalanobis_distance_, + params_.pose_gate_dist)); + + diag_status_array.push_back(checkMeasurementUpdated( + "twist", twist_no_update_count_, params_.twist_no_update_count_threshold_warn, + params_.twist_no_update_count_threshold_error)); + diag_status_array.push_back(checkMeasurementQueueSize("twist", twist_queue_size_)); + diag_status_array.push_back(checkMeasurementDelayGate( + "twist", twist_is_passed_delay_gate_, twist_delay_time_, twist_delay_time_threshold_)); + diag_status_array.push_back(checkMeasurementMahalanobisGate( + "twist", twist_is_passed_mahalanobis_gate_, twist_mahalanobis_distance_, + params_.twist_gate_dist)); + } + + diagnostic_msgs::msg::DiagnosticStatus diag_merged_status; + diag_merged_status = mergeDiagnosticStatus(diag_status_array); + diag_merged_status.name = "localization: " + std::string(this->get_name()); + diag_merged_status.hardware_id = this->get_name(); + + diagnostic_msgs::msg::DiagnosticArray diag_msg; + diag_msg.header.stamp = this->now(); + diag_msg.status.push_back(diag_merged_status); + pub_diag_->publish(diag_msg); +} + void EKFLocalizer::updateSimple1DFilters( const geometry_msgs::msg::PoseWithCovarianceStamped & pose, const size_t smoothing_step) { diff --git a/localization/ekf_localizer/test/test_diagnostics.cpp b/localization/ekf_localizer/test/test_diagnostics.cpp new file mode 100644 index 0000000000000..f506dca1cb230 --- /dev/null +++ b/localization/ekf_localizer/test/test_diagnostics.cpp @@ -0,0 +1,192 @@ +// Copyright 2023 Autoware Foundation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ekf_localizer/diagnostics.hpp" + +#include + +TEST(TestEkfDiagnostics, CheckProcessActivated) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + bool is_activated = true; + stat = checkProcessActivated(is_activated); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + is_activated = false; + stat = checkProcessActivated(is_activated); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); +} + +TEST(TestEkfDiagnostics, checkMeasurementUpdated) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + const std::string measurement_type = "pose"; // not effect for stat.level + const size_t no_update_count_threshold_warn = 50; + const size_t no_update_count_threshold_error = 250; + + size_t no_update_count = 0; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + no_update_count = 1; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + no_update_count = 49; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + no_update_count = 50; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); + + no_update_count = 249; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); + + no_update_count = 250; + stat = checkMeasurementUpdated( + measurement_type, no_update_count, no_update_count_threshold_warn, + no_update_count_threshold_error); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::ERROR); +} + +TEST(TestEkfDiagnostics, CheckMeasurementQueueSize) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + const std::string measurement_type = "pose"; // not effect for stat.level + + size_t queue_size = 0; // not effect for stat.level + stat = checkMeasurementQueueSize(measurement_type, queue_size); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + queue_size = 1; // not effect for stat.level + stat = checkMeasurementQueueSize(measurement_type, queue_size); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); +} + +TEST(TestEkfDiagnostics, CheckMeasurementDelayGate) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + const std::string measurement_type = "pose"; // not effect for stat.level + const double delay_time = 0.1; // not effect for stat.level + const double delay_time_threshold = 1.0; // not effect for stat.level + + bool is_passed_delay_gate = true; + stat = checkMeasurementDelayGate( + measurement_type, 
is_passed_delay_gate, delay_time, delay_time_threshold); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + is_passed_delay_gate = false; + stat = checkMeasurementDelayGate( + measurement_type, is_passed_delay_gate, delay_time, delay_time_threshold); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); +} + +TEST(TestEkfDiagnostics, CheckMeasurementMahalanobisGate) +{ + diagnostic_msgs::msg::DiagnosticStatus stat; + + const std::string measurement_type = "pose"; // not effect for stat.level + const double mahalanobis_distance = 0.1; // not effect for stat.level + const double mahalanobis_distance_threshold = 1.0; // not effect for stat.level + + bool is_passed_mahalanobis_gate = true; + stat = checkMeasurementMahalanobisGate( + measurement_type, is_passed_mahalanobis_gate, mahalanobis_distance, + mahalanobis_distance_threshold); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + + is_passed_mahalanobis_gate = false; + stat = checkMeasurementMahalanobisGate( + measurement_type, is_passed_mahalanobis_gate, mahalanobis_distance, + mahalanobis_distance_threshold); + EXPECT_EQ(stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); +} + +TEST(TestLocalizationErrorMonitorDiagnostics, MergeDiagnosticStatus) +{ + diagnostic_msgs::msg::DiagnosticStatus merged_stat; + std::vector stat_array(2); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat_array.at(0).message = "OK"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat_array.at(1).message = "OK"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::OK); + EXPECT_EQ(merged_stat.message, "OK"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat_array.at(0).message = "WARN0"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat_array.at(1).message = "OK"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); + EXPECT_EQ(merged_stat.message, "WARN0"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat_array.at(0).message = "OK"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat_array.at(1).message = "WARN1"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); + EXPECT_EQ(merged_stat.message, "WARN1"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat_array.at(0).message = "WARN0"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat_array.at(1).message = "WARN1"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::WARN); + EXPECT_EQ(merged_stat.message, "WARN0; WARN1"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::OK; + stat_array.at(0).message = "OK"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + stat_array.at(1).message = "ERROR1"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::ERROR); + EXPECT_EQ(merged_stat.message, "ERROR1"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::WARN; + stat_array.at(0).message = "WARN0"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + stat_array.at(1).message = 
"ERROR1"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::ERROR); + EXPECT_EQ(merged_stat.message, "WARN0; ERROR1"); + + stat_array.at(0).level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + stat_array.at(0).message = "ERROR0"; + stat_array.at(1).level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + stat_array.at(1).message = "ERROR1"; + merged_stat = mergeDiagnosticStatus(stat_array); + EXPECT_EQ(merged_stat.level, diagnostic_msgs::msg::DiagnosticStatus::ERROR); + EXPECT_EQ(merged_stat.message, "ERROR0; ERROR1"); +} diff --git a/localization/stop_filter/CMakeLists.txt b/localization/stop_filter/CMakeLists.txt index 2d1867b8cd0bc..97a0443195ae5 100644 --- a/localization/stop_filter/CMakeLists.txt +++ b/localization/stop_filter/CMakeLists.txt @@ -13,4 +13,5 @@ ament_target_dependencies(stop_filter) ament_auto_package( INSTALL_TO_SHARE launch + config ) diff --git a/localization/stop_filter/config/stop_filter.param.yaml b/localization/stop_filter/config/stop_filter.param.yaml new file mode 100644 index 0000000000000..ded090b75b5bd --- /dev/null +++ b/localization/stop_filter/config/stop_filter.param.yaml @@ -0,0 +1,4 @@ +/**: + ros__parameters: + vx_threshold: 0.1 # [m/s] + wz_threshold: 0.02 # [rad/s] diff --git a/localization/stop_filter/launch/stop_filter.launch.xml b/localization/stop_filter/launch/stop_filter.launch.xml index 36a66a2c143c0..0ea92d26c9677 100644 --- a/localization/stop_filter/launch/stop_filter.launch.xml +++ b/localization/stop_filter/launch/stop_filter.launch.xml @@ -1,6 +1,5 @@ - - + @@ -10,7 +9,6 @@ - - + diff --git a/localization/stop_filter/src/stop_filter.cpp b/localization/stop_filter/src/stop_filter.cpp index 111d460be737e..ac0960b8cb67b 100644 --- a/localization/stop_filter/src/stop_filter.cpp +++ b/localization/stop_filter/src/stop_filter.cpp @@ -27,8 +27,8 @@ using std::placeholders::_1; StopFilter::StopFilter(const std::string & node_name, const rclcpp::NodeOptions & node_options) : rclcpp::Node(node_name, node_options) { - vx_threshold_ = declare_parameter("vx_threshold", 0.01); - wz_threshold_ = declare_parameter("wz_threshold", 0.01); + vx_threshold_ = declare_parameter("vx_threshold"); + wz_threshold_ = declare_parameter("wz_threshold"); sub_odom_ = create_subscription( "input/odom", 1, std::bind(&StopFilter::callbackOdometry, this, _1)); diff --git a/perception/cluster_merger/CMakeLists.txt b/perception/cluster_merger/CMakeLists.txt new file mode 100644 index 0000000000000..49506f4b439fb --- /dev/null +++ b/perception/cluster_merger/CMakeLists.txt @@ -0,0 +1,27 @@ +cmake_minimum_required(VERSION 3.8) +project(cluster_merger) + +# find dependencies +find_package(autoware_cmake REQUIRED) +autoware_package() + +# Targets +ament_auto_add_library(cluster_merger_node_component SHARED + src/cluster_merger/node.cpp +) + +rclcpp_components_register_node(cluster_merger_node_component + PLUGIN "cluster_merger::ClusterMergerNode" + EXECUTABLE cluster_merger_node) + + +if(BUILD_TESTING) + list(APPEND AMENT_LINT_AUTO_EXCLUDE ament_cmake_uncrustify) + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() +endif() + +ament_auto_package( + INSTALL_TO_SHARE + launch +) diff --git a/perception/cluster_merger/README.md b/perception/cluster_merger/README.md new file mode 100644 index 0000000000000..b46f7401b70ec --- /dev/null +++ b/perception/cluster_merger/README.md @@ -0,0 +1,72 @@ +# cluster merger + +## Purpose + +cluster_merger is a package for merging 
pointcloud clusters that are published as detected objects with feature type. + +## Inner-workings / Algorithms + +The merged output is simply the concatenation of the clusters from the input topics. + +## Input / Output + +### Input + +| Name | Type | Description | +| ---------------- | -------------------------------------------------------- | ------------------- | +| `input/cluster0` | `tier4_perception_msgs::msg::DetectedObjectsWithFeature` | pointcloud clusters | +| `input/cluster1` | `tier4_perception_msgs::msg::DetectedObjectsWithFeature` | pointcloud clusters | + +### Output + +| Name | Type | Description | +| ----------------- | -------------------------------------------------------- | --------------- | +| `output/clusters` | `tier4_perception_msgs::msg::DetectedObjectsWithFeature` | merged clusters | + +## Parameters + +| Name | Type | Description | Default value | +| :---------------- | :----- | :---------------------------------------- | :------------ | +| `output_frame_id` | string | The header frame_id of the output topic. | base_link | + +## Assumptions / Known limits + + + +## (Optional) Error detection and handling + + + +## (Optional) Performance characterization + + + +## (Optional) References/External links + + + +## (Optional) Future extensions / Unimplemented parts diff --git a/perception/cluster_merger/include/cluster_merger/node.hpp b/perception/cluster_merger/include/cluster_merger/node.hpp new file mode 100644 index 0000000000000..8da5999f00384 --- /dev/null +++ b/perception/cluster_merger/include/cluster_merger/node.hpp @@ -0,0 +1,73 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
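+// +// ClusterMergerNode (declared below) subscribes to two DetectedObjectsWithFeature topics +// ("input/cluster0" and "input/cluster1"), synchronizes them with an approximate-time policy, +// transforms both into the configured output_frame_id frame, and republishes the concatenated +// clusters on "~/output/clusters".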
+ +#ifndef CLUSTER_MERGER__NODE_HPP_ +#define CLUSTER_MERGER__NODE_HPP_ + +#include "message_filters/subscriber.h" +#include "message_filters/sync_policies/approximate_time.h" +#include "message_filters/synchronizer.h" +#include "rclcpp/rclcpp.hpp" +#include "tier4_autoware_utils/tier4_autoware_utils.hpp" + +#include "tier4_perception_msgs/msg/detected_objects_with_feature.hpp" + +#include +#include + +#include +#include +#include +#include + +namespace cluster_merger +{ +using tier4_perception_msgs::msg::DetectedObjectsWithFeature; +using tier4_perception_msgs::msg::DetectedObjectWithFeature; + +class ClusterMergerNode : public rclcpp::Node +{ +public: + explicit ClusterMergerNode(const rclcpp::NodeOptions & node_options); + +private: + // Subscriber + + tf2_ros::Buffer tf_buffer_; + tf2_ros::TransformListener tf_listener_; + + rclcpp::Subscription::SharedPtr sub_objects_{}; + message_filters::Subscriber objects0_sub_; + message_filters::Subscriber objects1_sub_; + typedef message_filters::sync_policies::ApproximateTime< + DetectedObjectsWithFeature, DetectedObjectsWithFeature> + SyncPolicy; + typedef message_filters::Synchronizer Sync; + Sync sync_; + + std::string output_frame_id_; + + std::vector::SharedPtr> sub_objects_array{}; + std::shared_ptr transform_listener_; + + void objectsCallback( + const DetectedObjectsWithFeature::ConstSharedPtr & input_objects0_msg, + const DetectedObjectsWithFeature::ConstSharedPtr & input_objects1_msg); + // Publisher + rclcpp::Publisher::SharedPtr pub_objects_; +}; + +} // namespace cluster_merger + +#endif // CLUSTER_MERGER__NODE_HPP_ diff --git a/perception/cluster_merger/launch/cluster_merger.launch.xml b/perception/cluster_merger/launch/cluster_merger.launch.xml new file mode 100644 index 0000000000000..1bbd0ebd91e12 --- /dev/null +++ b/perception/cluster_merger/launch/cluster_merger.launch.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/perception/cluster_merger/package.xml b/perception/cluster_merger/package.xml new file mode 100644 index 0000000000000..14826ad07e098 --- /dev/null +++ b/perception/cluster_merger/package.xml @@ -0,0 +1,28 @@ + + + + cluster_merger + 0.1.0 + The ROS 2 cluster merger package + Yukihiro Saito + Shunsuke Miura + autoware + Apache License 2.0 + + ament_cmake_auto + + geometry_msgs + message_filters + object_recognition_utils + rclcpp + rclcpp_components + tier4_autoware_utils + tier4_perception_msgs + autoware_cmake + ament_lint_auto + autoware_lint_common + + + ament_cmake + + diff --git a/perception/cluster_merger/src/cluster_merger/node.cpp b/perception/cluster_merger/src/cluster_merger/node.cpp new file mode 100644 index 0000000000000..48bf953027510 --- /dev/null +++ b/perception/cluster_merger/src/cluster_merger/node.cpp @@ -0,0 +1,73 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
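+// +// ClusterMergerNode implementation: the synchronized callback transforms both input +// DetectedObjectsWithFeature messages into output_frame_id with +// object_recognition_utils::transformObjectsWithFeature, concatenates their feature_objects, +// and publishes the merged result; if either transform fails, nothing is published.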
+ +#include "cluster_merger/node.hpp" + +#include "object_recognition_utils/object_recognition_utils.hpp" + +#include +#include +#include +namespace cluster_merger +{ + +ClusterMergerNode::ClusterMergerNode(const rclcpp::NodeOptions & node_options) +: rclcpp::Node("cluster_merger_node", node_options), + tf_buffer_(get_clock()), + tf_listener_(tf_buffer_), + objects0_sub_(this, "input/cluster0", rclcpp::QoS{1}.get_rmw_qos_profile()), + objects1_sub_(this, "input/cluster1", rclcpp::QoS{1}.get_rmw_qos_profile()), + sync_(SyncPolicy(10), objects0_sub_, objects1_sub_) +{ + output_frame_id_ = declare_parameter("output_frame_id"); + using std::placeholders::_1; + using std::placeholders::_2; + sync_.registerCallback(std::bind(&ClusterMergerNode::objectsCallback, this, _1, _2)); + + // Publisher + pub_objects_ = create_publisher("~/output/clusters", rclcpp::QoS{1}); +} + +void ClusterMergerNode::objectsCallback( + const DetectedObjectsWithFeature::ConstSharedPtr & input_objects0_msg, + const DetectedObjectsWithFeature::ConstSharedPtr & input_objects1_msg) +{ + if (pub_objects_->get_subscription_count() < 1) { + return; + } + // TODO(badai-nguyen): transform input topics to desired frame before concatenating + /* transform to the same with cluster0 frame id*/ + DetectedObjectsWithFeature transformed_objects0; + DetectedObjectsWithFeature transformed_objects1; + if ( + !object_recognition_utils::transformObjectsWithFeature( + *input_objects0_msg, output_frame_id_, tf_buffer_, transformed_objects0) || + !object_recognition_utils::transformObjectsWithFeature( + *input_objects1_msg, output_frame_id_, tf_buffer_, transformed_objects1)) { + return; + } + + DetectedObjectsWithFeature output_objects; + output_objects.header = input_objects0_msg->header; + // add check frame id and transform if they are different + output_objects.feature_objects = transformed_objects0.feature_objects; + output_objects.feature_objects.insert( + output_objects.feature_objects.end(), transformed_objects1.feature_objects.begin(), + transformed_objects1.feature_objects.end()); + pub_objects_->publish(output_objects); +} +} // namespace cluster_merger + +#include "rclcpp_components/register_node_macro.hpp" +RCLCPP_COMPONENTS_REGISTER_NODE(cluster_merger::ClusterMergerNode) diff --git a/perception/euclidean_cluster/config/compare_map.param.yaml b/perception/euclidean_cluster/config/compare_map.param.yaml deleted file mode 100644 index 3dd303464a2c1..0000000000000 --- a/perception/euclidean_cluster/config/compare_map.param.yaml +++ /dev/null @@ -1,20 +0,0 @@ -/**: - ros__parameters: - - # distance threshold for compare compare - distance_threshold: 0.5 - - # publish voxelized map pointcloud for debug - publish_debug_pcd: False - - # use dynamic map loading - use_dynamic_map_loading: True - - # time interval to check dynamic map loading - timer_interval_ms: 100 - - # distance threshold for dynamic map update - map_update_distance_threshold: 10.0 - - # radius map for dynamic map loading - map_loader_radius: 150.0 diff --git a/perception/euclidean_cluster/config/outlier.param.yaml b/perception/euclidean_cluster/config/outlier.param.yaml deleted file mode 100644 index 1962fba1f332a..0000000000000 --- a/perception/euclidean_cluster/config/outlier.param.yaml +++ /dev/null @@ -1,8 +0,0 @@ -/**: - ros__parameters: - input_frame: base_link - output_frame: base_link - voxel_size_x: 0.3 - voxel_size_y: 0.3 - voxel_size_z: 100.0 - voxel_points_threshold: 3 diff --git a/perception/euclidean_cluster/config/voxel_grid.param.yaml 
b/perception/euclidean_cluster/config/voxel_grid.param.yaml deleted file mode 100644 index 3ff32bfbb7146..0000000000000 --- a/perception/euclidean_cluster/config/voxel_grid.param.yaml +++ /dev/null @@ -1,7 +0,0 @@ -/**: - ros__parameters: - input_frame: base_link - output_frame: base_link - voxel_size_x: 0.15 - voxel_size_y: 0.15 - voxel_size_z: 0.15 diff --git a/perception/euclidean_cluster/config/voxel_grid_based_euclidean_cluster.param.yaml b/perception/euclidean_cluster/config/voxel_grid_based_euclidean_cluster.param.yaml index 2f3de2b789eb3..26b027f0077aa 100644 --- a/perception/euclidean_cluster/config/voxel_grid_based_euclidean_cluster.param.yaml +++ b/perception/euclidean_cluster/config/voxel_grid_based_euclidean_cluster.param.yaml @@ -7,9 +7,11 @@ max_cluster_size: 3000 use_height: false input_frame: "base_link" - max_x: 70.0 - min_x: -70.0 - max_y: 70.0 - min_y: -70.0 - max_z: 4.5 - min_z: -4.5 + + # low height crop box filter param + max_x: 200.0 + min_x: -200.0 + max_y: 200.0 + min_y: -200.0 + max_z: 2.0 + min_z: -10.0 diff --git a/perception/euclidean_cluster/launch/euclidean_cluster.launch.py b/perception/euclidean_cluster/launch/euclidean_cluster.launch.py index db86bbf80d250..66c25396a656e 100644 --- a/perception/euclidean_cluster/launch/euclidean_cluster.launch.py +++ b/perception/euclidean_cluster/launch/euclidean_cluster.launch.py @@ -35,61 +35,35 @@ def load_composable_node_param(param_path): ns = "" pkg = "euclidean_cluster" - # set voxel grid filter as a component - voxel_grid_filter_component = ComposableNode( + low_height_cropbox_filter_component = ComposableNode( package="pointcloud_preprocessor", - plugin="pointcloud_preprocessor::VoxelGridDownsampleFilterComponent", - name=AnonName("voxel_grid_filter"), + namespace=ns, + plugin="pointcloud_preprocessor::CropBoxFilterComponent", + name="low_height_crop_box_filter", remappings=[ ("input", LaunchConfiguration("input_pointcloud")), - ("output", "voxel_grid_filtered/pointcloud"), - ], - parameters=[load_composable_node_param("voxel_grid_param_path")], - ) - - # set compare map filter as a component - compare_map_filter_component = ComposableNode( - package="compare_map_segmentation", - plugin="compare_map_segmentation::VoxelBasedCompareMapFilterComponent", - name=AnonName("compare_map_filter"), - remappings=[ - ("input", "voxel_grid_filtered/pointcloud"), - ("map", LaunchConfiguration("input_map")), - ("output", "compare_map_filtered/pointcloud"), - ("map_loader_service", "/map/get_differential_pointcloud_map"), - ("kinematic_state", "/localization/kinematic_state"), - ], - parameters=[ - { - "distance_threshold": 0.5, - "timer_interval_ms": 100, - "use_dynamic_map_loading": True, - "downsize_ratio_z_axis": 0.5, - "map_update_distance_threshold": 10.0, - "map_loader_radius": 150.0, - "publish_debug_pcd": True, - "input_frame": "map", - } + ("output", "low_height/pointcloud"), ], + parameters=[load_composable_node_param("voxel_grid_based_euclidean_param_path")], ) - use_map_euclidean_cluster_component = ComposableNode( + use_low_height_euclidean_component = ComposableNode( package=pkg, plugin="euclidean_cluster::EuclideanClusterNode", name=AnonName("euclidean_cluster"), remappings=[ - ("input", "compare_map_filtered/pointcloud"), + ("input", "low_height/pointcloud"), ("output", LaunchConfiguration("output_clusters")), ], parameters=[load_composable_node_param("euclidean_param_path")], ) - disuse_map_euclidean_cluster_component = ComposableNode( + disuse_low_height_euclidean_component = ComposableNode( package=pkg, 
plugin="euclidean_cluster::EuclideanClusterNode", name=AnonName("euclidean_cluster"), remappings=[ - ("input", "voxel_grid_filtered/pointcloud"), + ("input", LaunchConfiguration("input_pointcloud")), ("output", LaunchConfiguration("output_clusters")), ], parameters=[load_composable_node_param("euclidean_param_path")], @@ -100,26 +74,26 @@ def load_composable_node_param(param_path): namespace=ns, package="rclcpp_components", executable="component_container", - composable_node_descriptions=[voxel_grid_filter_component], + composable_node_descriptions=[], output="screen", ) - use_map_loader = LoadComposableNodes( + use_low_height_pointcloud_loader = LoadComposableNodes( composable_node_descriptions=[ - compare_map_filter_component, - use_map_euclidean_cluster_component, + low_height_cropbox_filter_component, + use_low_height_euclidean_component, ], target_container=container, - condition=IfCondition(LaunchConfiguration("use_pointcloud_map")), + condition=IfCondition(LaunchConfiguration("use_low_height_cropbox")), ) - disuse_map_loader = LoadComposableNodes( - composable_node_descriptions=[disuse_map_euclidean_cluster_component], + disuse_low_height_pointcloud_loader = LoadComposableNodes( + composable_node_descriptions=[disuse_low_height_euclidean_component], target_container=container, - condition=UnlessCondition(LaunchConfiguration("use_pointcloud_map")), + condition=UnlessCondition(LaunchConfiguration("use_low_height_cropbox")), ) - return [container, use_map_loader, disuse_map_loader] + return [container, use_low_height_pointcloud_loader, disuse_low_height_pointcloud_loader] def generate_launch_description(): @@ -131,14 +105,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("input_pointcloud", "/perception/obstacle_segmentation/pointcloud"), add_launch_arg("input_map", "/map/pointcloud_map"), add_launch_arg("output_clusters", "clusters"), - add_launch_arg("use_pointcloud_map", "false"), - add_launch_arg( - "voxel_grid_param_path", - [ - FindPackageShare("autoware_launch"), - "/config/perception/object_recognition/detection/clustering/voxel_grid.param.yaml", - ], - ), + add_launch_arg("use_low_height_cropbox", "false"), add_launch_arg( "euclidean_param_path", [ diff --git a/perception/euclidean_cluster/launch/euclidean_cluster.launch.xml b/perception/euclidean_cluster/launch/euclidean_cluster.launch.xml index 7159fb4c42793..fd1ea82befae0 100644 --- a/perception/euclidean_cluster/launch/euclidean_cluster.launch.xml +++ b/perception/euclidean_cluster/launch/euclidean_cluster.launch.xml @@ -3,16 +3,14 @@ - - + - - + diff --git a/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.py b/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.py index 82ce5605b67cd..00bcd4422bd3c 100644 --- a/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.py +++ b/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.py @@ -16,10 +16,8 @@ from launch.actions import DeclareLaunchArgument from launch.actions import OpaqueFunction from launch.conditions import IfCondition -from launch.substitutions import AndSubstitution -from launch.substitutions import AnonName +from launch.conditions import UnlessCondition from launch.substitutions import LaunchConfiguration -from launch.substitutions import NotSubstitution from launch_ros.actions import ComposableNodeContainer from launch_ros.actions import LoadComposableNodes from launch_ros.descriptions import ComposableNode @@ -36,159 +34,31 @@ def 
load_composable_node_param(param_path): ns = "" pkg = "euclidean_cluster" - # set compare map filter as a component - compare_map_filter_component = ComposableNode( - package="compare_map_segmentation", - namespace=ns, - plugin="compare_map_segmentation::VoxelBasedCompareMapFilterComponent", - name=AnonName("compare_map_filter"), - remappings=[ - ("input", LaunchConfiguration("input_pointcloud")), - ("map", LaunchConfiguration("input_map")), - ("output", "map_filter/pointcloud"), - ("map_loader_service", "/map/get_differential_pointcloud_map"), - ("kinematic_state", "/localization/kinematic_state"), - ], - parameters=[load_composable_node_param("compare_map_param_path")], - ) - - # separate range of pointcloud when map_filter used - use_map_short_range_crop_box_filter_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::CropBoxFilterComponent", - name="short_distance_crop_box_range", - remappings=[ - ("input", "map_filter/pointcloud"), - ("output", "short_range/pointcloud"), - ], - parameters=[ - load_composable_node_param("voxel_grid_based_euclidean_param_path"), - { - "negative": False, - }, - ], - ) - - use_map_long_range_crop_box_filter_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::CropBoxFilterComponent", - name="long_distance_crop_box_range", - remappings=[ - ("input", LaunchConfiguration("input_pointcloud")), - ("output", "long_range/pointcloud"), - ], - parameters=[ - load_composable_node_param("voxel_grid_based_euclidean_param_path"), - { - "negative": True, - }, - ], - ) - - # disuse_map_voxel_grid_filter - disuse_map_short_range_crop_box_filter_component = ComposableNode( + low_height_cropbox_filter_component = ComposableNode( package="pointcloud_preprocessor", namespace=ns, plugin="pointcloud_preprocessor::CropBoxFilterComponent", - name="short_distance_crop_box_range", + name="low_height_crop_box_filter", remappings=[ ("input", LaunchConfiguration("input_pointcloud")), - ("output", "short_range/pointcloud"), - ], - parameters=[ - load_composable_node_param("voxel_grid_based_euclidean_param_path"), - { - "negative": False, - }, - ], - ) - - disuse_map_long_range_crop_box_filter_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::CropBoxFilterComponent", - name="long_distance_crop_box_range", - remappings=[ - ("input", LaunchConfiguration("input_pointcloud")), - ("output", "long_range/pointcloud"), - ], - parameters=[ - load_composable_node_param("voxel_grid_based_euclidean_param_path"), - { - "negative": True, - }, - ], - ) - - # set voxel grid filter as a component - voxel_grid_filter_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::ApproximateDownsampleFilterComponent", - name=AnonName("voxel_grid_filter"), - remappings=[ - ("input", "short_range/pointcloud"), - ("output", "downsampled/short_range/pointcloud"), - ], - parameters=[load_composable_node_param("voxel_grid_param_path")], - ) - - # set outlier filter as a component - outlier_filter_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::VoxelGridOutlierFilterComponent", - name="outlier_filter", - remappings=[ - ("input", "downsampled/short_range/pointcloud"), - ("output", "outlier_filter/pointcloud"), - ], - parameters=[load_composable_node_param("outlier_param_path")], - ) - - # concat 
with-outlier pointcloud and without-outlier pcl - downsample_concat_component = ComposableNode( - package="pointcloud_preprocessor", - namespace=ns, - plugin="pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent", - name="concat_downsampled_pcl", - remappings=[("output", "downsampled/concatenated/pointcloud")], - parameters=[ - { - "input_topics": ["long_range/pointcloud", "outlier_filter/pointcloud"], - "output_frame": "base_link", - } - ], - extra_arguments=[{"use_intra_process_comms": True}], - ) - - # set euclidean cluster as a component - use_downsample_euclidean_cluster_component = ComposableNode( - package=pkg, - namespace=ns, - plugin="euclidean_cluster::VoxelGridBasedEuclideanClusterNode", - name="euclidean_cluster", - remappings=[ - ("input", "downsampled/concatenated/pointcloud"), - ("output", LaunchConfiguration("output_clusters")), + ("output", "low_height/pointcloud"), ], parameters=[load_composable_node_param("voxel_grid_based_euclidean_param_path")], ) - use_map_disuse_downsample_euclidean_component = ComposableNode( + use_low_height_euclidean_component = ComposableNode( package=pkg, namespace=ns, plugin="euclidean_cluster::VoxelGridBasedEuclideanClusterNode", name="euclidean_cluster", remappings=[ - ("input", "map_filter/pointcloud"), + ("input", "low_height/pointcloud"), ("output", LaunchConfiguration("output_clusters")), ], parameters=[load_composable_node_param("voxel_grid_based_euclidean_param_path")], ) - disuse_map_disuse_downsample_euclidean_component = ComposableNode( + + disuse_low_height_euclidean_component = ComposableNode( package=pkg, namespace=ns, plugin="euclidean_cluster::VoxelGridBasedEuclideanClusterNode", @@ -209,75 +79,24 @@ def load_composable_node_param(param_path): output="screen", ) - use_map_use_downsample_loader = LoadComposableNodes( + use_low_height_pointcloud_loader = LoadComposableNodes( composable_node_descriptions=[ - compare_map_filter_component, - use_map_short_range_crop_box_filter_component, - use_map_long_range_crop_box_filter_component, - voxel_grid_filter_component, - outlier_filter_component, - downsample_concat_component, - use_downsample_euclidean_cluster_component, + low_height_cropbox_filter_component, + use_low_height_euclidean_component, ], target_container=container, - condition=IfCondition( - AndSubstitution( - LaunchConfiguration("use_pointcloud_map"), - LaunchConfiguration("use_downsample_pointcloud"), - ) - ), + condition=IfCondition(LaunchConfiguration("use_low_height_cropbox")), ) - disuse_map_use_downsample_loader = LoadComposableNodes( - composable_node_descriptions=[ - disuse_map_short_range_crop_box_filter_component, - disuse_map_long_range_crop_box_filter_component, - voxel_grid_filter_component, - outlier_filter_component, - downsample_concat_component, - use_downsample_euclidean_cluster_component, - ], + disuse_low_height_pointcloud_loader = LoadComposableNodes( + composable_node_descriptions=[disuse_low_height_euclidean_component], target_container=container, - condition=IfCondition( - AndSubstitution( - NotSubstitution(LaunchConfiguration("use_pointcloud_map")), - LaunchConfiguration("use_downsample_pointcloud"), - ) - ), - ) - - use_map_disuse_downsample_loader = LoadComposableNodes( - composable_node_descriptions=[ - compare_map_filter_component, - use_map_disuse_downsample_euclidean_component, - ], - target_container=container, - condition=IfCondition( - AndSubstitution( - (LaunchConfiguration("use_pointcloud_map")), - NotSubstitution(LaunchConfiguration("use_downsample_pointcloud")), - ) - 
), - ) - - disuse_map_disuse_downsample_loader = LoadComposableNodes( - composable_node_descriptions=[ - disuse_map_disuse_downsample_euclidean_component, - ], - target_container=container, - condition=IfCondition( - AndSubstitution( - NotSubstitution(LaunchConfiguration("use_pointcloud_map")), - NotSubstitution(LaunchConfiguration("use_downsample_pointcloud")), - ) - ), + condition=UnlessCondition(LaunchConfiguration("use_low_height_cropbox")), ) return [ container, - use_map_use_downsample_loader, - disuse_map_use_downsample_loader, - use_map_disuse_downsample_loader, - disuse_map_disuse_downsample_loader, + use_low_height_pointcloud_loader, + disuse_low_height_pointcloud_loader, ] @@ -290,29 +109,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("input_pointcloud", "/perception/obstacle_segmentation/pointcloud"), add_launch_arg("input_map", "/map/pointcloud_map"), add_launch_arg("output_clusters", "clusters"), - add_launch_arg("use_pointcloud_map", "false"), - add_launch_arg("use_downsample_pointcloud", "false"), - add_launch_arg( - "voxel_grid_param_path", - [ - FindPackageShare("autoware_launch"), - "/config/perception/object_recognition/detection/clustering/voxel_grid.param.yaml", - ], - ), - add_launch_arg( - "outlier_param_path", - [ - FindPackageShare("autoware_launch"), - "/config/perception/object_recognition/detection/clustering/outlier.param.yaml", - ], - ), - add_launch_arg( - "compare_map_param_path", - [ - FindPackageShare("autoware_launch"), - "/config/perception/object_recognition/detection/clustering/compare_map.param.yaml", - ], - ), + add_launch_arg("use_low_height_cropbox", "false"), add_launch_arg( "voxel_grid_based_euclidean_param_path", [ diff --git a/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.xml b/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.xml index 2833fed81c28e..b6a426dabfd12 100644 --- a/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.xml +++ b/perception/euclidean_cluster/launch/voxel_grid_based_euclidean_cluster.launch.xml @@ -3,22 +3,14 @@ - - - - - + - - - - - + diff --git a/perception/image_projection_based_fusion/CMakeLists.txt b/perception/image_projection_based_fusion/CMakeLists.txt index 906ad71d21732..ce7c3b5ea12a9 100644 --- a/perception/image_projection_based_fusion/CMakeLists.txt +++ b/perception/image_projection_based_fusion/CMakeLists.txt @@ -22,6 +22,7 @@ ament_auto_add_library(${PROJECT_NAME} SHARED src/utils/utils.cpp src/roi_cluster_fusion/node.cpp src/roi_detected_object_fusion/node.cpp + src/roi_pointcloud_fusion/node.cpp ) target_link_libraries(${PROJECT_NAME} @@ -39,6 +40,11 @@ rclcpp_components_register_node(${PROJECT_NAME} EXECUTABLE roi_cluster_fusion_node ) +rclcpp_components_register_node(${PROJECT_NAME} + PLUGIN "image_projection_based_fusion::RoiPointCloudFusionNode" + EXECUTABLE roi_pointcloud_fusion_node +) + set(CUDA_VERBOSE OFF) # set flags for CUDA availability diff --git a/perception/image_projection_based_fusion/README.md b/perception/image_projection_based_fusion/README.md index 1bbee35ab44f8..207989d8a6f25 100644 --- a/perception/image_projection_based_fusion/README.md +++ b/perception/image_projection_based_fusion/README.md @@ -58,3 +58,4 @@ The rclcpp::TimerBase timer could not break a for loop, therefore even if time i | roi_cluster_fusion | Overwrite a classification label of clusters by that of ROIs from a 2D object detector. 
| [link](./docs/roi-cluster-fusion.md) | | roi_detected_object_fusion | Overwrite a classification label of detected objects by that of ROIs from a 2D object detector. | [link](./docs/roi-detected-object-fusion.md) | | pointpainting_fusion | Paint the point cloud with the ROIs from a 2D object detector and feed to a 3D object detector. | [link](./docs/pointpainting-fusion.md) | +| roi_pointcloud_fusion | Match the pointcloud with ROIs from a 2D object detector to detect unknown-labeled objects. | [link](./docs/roi-pointcloud-fusion.md) | diff --git a/perception/image_projection_based_fusion/docs/images/roi_pointcloud_fusion.png b/perception/image_projection_based_fusion/docs/images/roi_pointcloud_fusion.png new file mode 100644 index 0000000000000..c529de7c728fb Binary files /dev/null and b/perception/image_projection_based_fusion/docs/images/roi_pointcloud_fusion.png differ diff --git a/perception/image_projection_based_fusion/docs/roi-pointcloud-fusion.md b/perception/image_projection_based_fusion/docs/roi-pointcloud-fusion.md new file mode 100644 index 0000000000000..5d0b241a578d6 --- /dev/null +++ b/perception/image_projection_based_fusion/docs/roi-pointcloud-fusion.md @@ -0,0 +1,93 @@ +# roi_pointcloud_fusion + +## Purpose + +The `roi_pointcloud_fusion` node clusters the pointcloud based on Regions Of Interest (ROIs) detected by a 2D object detector, specifically for unknown-labeled ROIs. + +## Inner-workings / Algorithms + +- The pointcloud is projected onto the image planes, and the points inside an unknown-labeled ROI are extracted as a cluster. +- Since a cluster might contain unrelated background points, a refinement step filters out the background points based on their distance to the camera. + +![roi_pointcloud_fusion_image](./images/roi_pointcloud_fusion.png) + +## Inputs / Outputs + +### Input + +| Name | Type | Description | +| ------------------------ | -------------------------------------------------------- | ------------------------------------------------------------------ | +| `input` | `sensor_msgs::msg::PointCloud2` | input pointcloud | +| `input/camera_info[0-7]` | `sensor_msgs::msg::CameraInfo` | camera information used to project 3D points onto the image planes | +| `input/rois[0-7]` | `tier4_perception_msgs::msg::DetectedObjectsWithFeature` | ROIs from each image | +| `input/image_raw[0-7]` | `sensor_msgs::msg::Image` | images for visualization | + +### Output + +| Name | Type | Description | +| ----------------- | -------------------------------------------------------- | --------------------------------------------------------------------- | +| `output` | `sensor_msgs::msg::PointCloud2` | output pointcloud, published as part of the default fusion interface | +| `output_clusters` | `tier4_perception_msgs::msg::DetectedObjectsWithFeature` | output clusters | +| `debug/clusters` | `sensor_msgs/msg/PointCloud2` | colored cluster pointcloud for visualization | + +## Parameters + +### Core Parameters + +| Name | Type | Description | +| ---------------------- | ------ | --------------------------------------------------------------------------------------------- | +| `min_cluster_size` | int | the minimum number of points that a cluster needs to contain in order to be considered valid | +| `cluster_2d_tolerance` | double | the cluster tolerance, measured in the radial direction | +| `rois_number` | int | the number of input ROIs | + +## Assumptions / Known limits + +- Currently, the refinement is based only on the distance to the camera, so the ROI-based clustering is expected to work well for small-object ROIs.
Other criteria for refinement might be needed in the future. + + + +## (Optional) Error detection and handling + + + +## (Optional) Performance characterization + + + +## (Optional) References/External links + + + +## (Optional) Future extensions / Unimplemented parts + + diff --git a/perception/image_projection_based_fusion/include/image_projection_based_fusion/fusion_node.hpp b/perception/image_projection_based_fusion/include/image_projection_based_fusion/fusion_node.hpp index 016633fa4ef92..5174aebe069a8 100644 --- a/perception/image_projection_based_fusion/include/image_projection_based_fusion/fusion_node.hpp +++ b/perception/image_projection_based_fusion/include/image_projection_based_fusion/fusion_node.hpp @@ -29,6 +29,9 @@ #include #include #include +#include +#include +#include #include #include @@ -49,6 +52,8 @@ using autoware_auto_perception_msgs::msg::DetectedObjects; using sensor_msgs::msg::PointCloud2; using tier4_perception_msgs::msg::DetectedObjectsWithFeature; using tier4_perception_msgs::msg::DetectedObjectWithFeature; +using PointCloud = pcl::PointCloud; +using autoware_auto_perception_msgs::msg::ObjectClassification; template class FusionNode : public rclcpp::Node diff --git a/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_pointcloud_fusion/node.hpp b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_pointcloud_fusion/node.hpp new file mode 100644 index 0000000000000..2b4eb822a9edb --- /dev/null +++ b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_pointcloud_fusion/node.hpp @@ -0,0 +1,53 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
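+// +// RoiPointCloudFusionNode (declared below) projects the input pointcloud onto each camera image, +// collects the points that fall inside the selected ROIs (only unknown-labeled ROIs when +// fuse_unknown_only is true) into per-ROI clusters, refines them with updateOutputFusedObjects(), +// and publishes the clusters as DetectedObjectsWithFeature on "output_clusters" together with a +// debug pointcloud on "debug/clusters".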
+ +#ifndef IMAGE_PROJECTION_BASED_FUSION__ROI_POINTCLOUD_FUSION__NODE_HPP_ +#define IMAGE_PROJECTION_BASED_FUSION__ROI_POINTCLOUD_FUSION__NODE_HPP_ + +#include "image_projection_based_fusion/fusion_node.hpp" + +#include +#include +namespace image_projection_based_fusion +{ +class RoiPointCloudFusionNode +: public FusionNode +{ +private: + int min_cluster_size_{1}; + bool fuse_unknown_only_{true}; + double cluster_2d_tolerance_; + + rclcpp::Publisher::SharedPtr pub_objects_ptr_; + std::vector output_fused_objects_; + rclcpp::Publisher::SharedPtr cluster_debug_pub_; + + /* data */ +public: + explicit RoiPointCloudFusionNode(const rclcpp::NodeOptions & options); + +protected: + void preprocess(sensor_msgs::msg::PointCloud2 & pointcloud_msg) override; + + void postprocess(sensor_msgs::msg::PointCloud2 & pointcloud_msg) override; + + void fuseOnSingleImage( + const PointCloud2 & input_pointcloud_msg, const std::size_t image_id, + const DetectedObjectsWithFeature & input_roi_msg, + const sensor_msgs::msg::CameraInfo & camera_info, PointCloud2 & output_pointcloud_msg) override; + bool out_of_scope(const DetectedObjectWithFeature & obj); +}; + +} // namespace image_projection_based_fusion +#endif // IMAGE_PROJECTION_BASED_FUSION__ROI_POINTCLOUD_FUSION__NODE_HPP_ diff --git a/perception/image_projection_based_fusion/include/image_projection_based_fusion/utils/utils.hpp b/perception/image_projection_based_fusion/include/image_projection_based_fusion/utils/utils.hpp index 541cf997412c2..d7fd3c3882046 100644 --- a/perception/image_projection_based_fusion/include/image_projection_based_fusion/utils/utils.hpp +++ b/perception/image_projection_based_fusion/include/image_projection_based_fusion/utils/utils.hpp @@ -32,18 +32,54 @@ #include #endif +#include "image_projection_based_fusion/fusion_node.hpp" + +#include +#include +#include +#include + +#include "autoware_auto_perception_msgs/msg/shape.hpp" +#include "tier4_perception_msgs/msg/detected_object_with_feature.hpp" + +#include +#include +#include + #include #include +#include namespace image_projection_based_fusion { +using PointCloud = pcl::PointCloud; std::optional getTransformStamped( const tf2_ros::Buffer & tf_buffer, const std::string & target_frame_id, const std::string & source_frame_id, const rclcpp::Time & time); Eigen::Affine3d transformToEigen(const geometry_msgs::msg::Transform & t); +void convertCluster2FeatureObject( + const std_msgs::msg::Header & header, const PointCloud & cluster, + DetectedObjectWithFeature & feature_obj); +PointCloud closest_cluster( + PointCloud & cluster, const double cluster_2d_tolerance, const int min_cluster_size, + const pcl::PointXYZ & center); + +void updateOutputFusedObjects( + std::vector & output_objs, const std::vector & clusters, + const std_msgs::msg::Header & in_cloud_header, const std_msgs::msg::Header & in_roi_header, + const tf2_ros::Buffer & tf_buffer, const int min_cluster_size, const float cluster_2d_tolerance, + std::vector & output_fused_objects); + +geometry_msgs::msg::Point getCentroid(const sensor_msgs::msg::PointCloud2 & pointcloud); + +pcl::PointXYZ getClosestPoint(const pcl::PointCloud & cluster); +void addShapeAndKinematic( + const pcl::PointCloud & cluster, + tier4_perception_msgs::msg::DetectedObjectWithFeature & feature_obj); + } // namespace image_projection_based_fusion #endif // IMAGE_PROJECTION_BASED_FUSION__UTILS__UTILS_HPP_ diff --git a/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml 
b/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml new file mode 100644 index 0000000000000..181f4ccb88320 --- /dev/null +++ b/perception/image_projection_based_fusion/launch/roi_pointcloud_fusion.launch.xml @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/perception/image_projection_based_fusion/package.xml b/perception/image_projection_based_fusion/package.xml index 32d7a5633b811..5ff99af2ebb21 100644 --- a/perception/image_projection_based_fusion/package.xml +++ b/perception/image_projection_based_fusion/package.xml @@ -14,6 +14,7 @@ autoware_auto_perception_msgs autoware_point_types cv_bridge + euclidean_cluster image_transport lidar_centerpoint message_filters diff --git a/perception/image_projection_based_fusion/src/fusion_node.cpp b/perception/image_projection_based_fusion/src/fusion_node.cpp index 1bc351b08c01b..a540688f7e751 100644 --- a/perception/image_projection_based_fusion/src/fusion_node.cpp +++ b/perception/image_projection_based_fusion/src/fusion_node.cpp @@ -422,4 +422,5 @@ void FusionNode::publish(const Msg & output_msg) template class FusionNode; template class FusionNode; template class FusionNode; +template class FusionNode; } // namespace image_projection_based_fusion diff --git a/perception/image_projection_based_fusion/src/roi_cluster_fusion/node.cpp b/perception/image_projection_based_fusion/src/roi_cluster_fusion/node.cpp index 9180b18adeed8..978d026a3c859 100644 --- a/perception/image_projection_based_fusion/src/roi_cluster_fusion/node.cpp +++ b/perception/image_projection_based_fusion/src/roi_cluster_fusion/node.cpp @@ -171,12 +171,16 @@ void RoiClusterFusionNode::fuseOnSingleImage( for (const auto & feature_obj : input_roi_msg.feature_objects) { int index = 0; double max_iou = 0.0; + bool is_roi_label_known = + feature_obj.object.classification.front().label != ObjectClassification::UNKNOWN; for (const auto & cluster_map : m_cluster_roi) { double iou(0.0), iou_x(0.0), iou_y(0.0); if (use_iou_) { iou = calcIoU(cluster_map.second, feature_obj.feature.roi); } - if (use_iou_x_) { + // use for unknown roi to improve small objects like traffic cone detect + // TODO(badai-nguyen): add option to disable roi_cluster mode + if (use_iou_x_ || !is_roi_label_known) { iou_x = calcIoUX(cluster_map.second, feature_obj.feature.roi); } if (use_iou_y_) { @@ -192,8 +196,6 @@ void RoiClusterFusionNode::fuseOnSingleImage( } } if (!output_cluster_msg.feature_objects.empty()) { - bool is_roi_label_known = feature_obj.object.classification.front().label != - autoware_auto_perception_msgs::msg::ObjectClassification::UNKNOWN; bool is_roi_existence_prob_higher = output_cluster_msg.feature_objects.at(index).object.existence_probability <= feature_obj.object.existence_probability; diff --git a/perception/image_projection_based_fusion/src/roi_pointcloud_fusion/node.cpp b/perception/image_projection_based_fusion/src/roi_pointcloud_fusion/node.cpp new file mode 100644 index 0000000000000..233e568ebee0b --- /dev/null +++ b/perception/image_projection_based_fusion/src/roi_pointcloud_fusion/node.cpp @@ -0,0 +1,165 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "image_projection_based_fusion/roi_pointcloud_fusion/node.hpp" + +#include "image_projection_based_fusion/utils/geometry.hpp" +#include "image_projection_based_fusion/utils/utils.hpp" + +#ifdef ROS_DISTRO_GALACTIC +#include +#include +#else +#include +#include +#endif + +#include "euclidean_cluster/utils.hpp" + +namespace image_projection_based_fusion +{ +RoiPointCloudFusionNode::RoiPointCloudFusionNode(const rclcpp::NodeOptions & options) +: FusionNode("roi_pointcloud_fusion", options) +{ + fuse_unknown_only_ = declare_parameter("fuse_unknown_only"); + min_cluster_size_ = declare_parameter("min_cluster_size"); + cluster_2d_tolerance_ = declare_parameter("cluster_2d_tolerance"); + pub_objects_ptr_ = + this->create_publisher("output_clusters", rclcpp::QoS{1}); + cluster_debug_pub_ = this->create_publisher("debug/clusters", 1); +} + +void RoiPointCloudFusionNode::preprocess(__attribute__((unused)) + sensor_msgs::msg::PointCloud2 & pointcloud_msg) +{ + return; +} + +void RoiPointCloudFusionNode::postprocess(__attribute__((unused)) + sensor_msgs::msg::PointCloud2 & pointcloud_msg) +{ + const auto objects_sub_count = pub_objects_ptr_->get_subscription_count() + + pub_objects_ptr_->get_intra_process_subscription_count(); + if (objects_sub_count < 1) { + return; + } + + DetectedObjectsWithFeature output_msg; + output_msg.header = pointcloud_msg.header; + output_msg.feature_objects = output_fused_objects_; + + if (objects_sub_count > 0) { + pub_objects_ptr_->publish(output_msg); + } + output_fused_objects_.clear(); + // publish debug cluster + if (cluster_debug_pub_->get_subscription_count() > 0) { + sensor_msgs::msg::PointCloud2 debug_cluster_msg; + euclidean_cluster::convertObjectMsg2SensorMsg(output_msg, debug_cluster_msg); + cluster_debug_pub_->publish(debug_cluster_msg); + } +} +void RoiPointCloudFusionNode::fuseOnSingleImage( + const sensor_msgs::msg::PointCloud2 & input_pointcloud_msg, + __attribute__((unused)) const std::size_t image_id, + const DetectedObjectsWithFeature & input_roi_msg, + const sensor_msgs::msg::CameraInfo & camera_info, + __attribute__((unused)) sensor_msgs::msg::PointCloud2 & output_pointcloud_msg) +{ + if (input_pointcloud_msg.data.empty()) { + return; + } + std::vector output_objs; + // select ROIs for fusion + for (const auto & feature_obj : input_roi_msg.feature_objects) { + if (fuse_unknown_only_) { + bool is_roi_label_unknown = feature_obj.object.classification.front().label == + autoware_auto_perception_msgs::msg::ObjectClassification::UNKNOWN; + if (is_roi_label_unknown) { + output_objs.push_back(feature_obj); + } + } else { + // TODO(badai-nguyen): selected class from a list + output_objs.push_back(feature_obj); + } + } + if (output_objs.empty()) { + return; + } + + // transform pointcloud to camera optical frame id + Eigen::Matrix4d projection; + projection << camera_info.p.at(0), camera_info.p.at(1), camera_info.p.at(2), camera_info.p.at(3), + camera_info.p.at(4), camera_info.p.at(5), camera_info.p.at(6), camera_info.p.at(7), + camera_info.p.at(8), camera_info.p.at(9), camera_info.p.at(10), camera_info.p.at(11); + 
geometry_msgs::msg::TransformStamped transform_stamped; + { + const auto transform_stamped_optional = getTransformStamped( + tf_buffer_, input_roi_msg.header.frame_id, input_pointcloud_msg.header.frame_id, + input_roi_msg.header.stamp); + if (!transform_stamped_optional) { + return; + } + transform_stamped = transform_stamped_optional.value(); + } + + sensor_msgs::msg::PointCloud2 transformed_cloud; + tf2::doTransform(input_pointcloud_msg, transformed_cloud, transform_stamped); + + std::vector clusters; + clusters.resize(output_objs.size()); + + for (sensor_msgs::PointCloud2ConstIterator iter_x(transformed_cloud, "x"), + iter_y(transformed_cloud, "y"), iter_z(transformed_cloud, "z"), + iter_orig_x(input_pointcloud_msg, "x"), iter_orig_y(input_pointcloud_msg, "y"), + iter_orig_z(input_pointcloud_msg, "z"); + iter_x != iter_x.end(); + ++iter_x, ++iter_y, ++iter_z, ++iter_orig_x, ++iter_orig_y, ++iter_orig_z) { + if (*iter_z <= 0.0) { + continue; + } + Eigen::Vector4d projected_point = projection * Eigen::Vector4d(*iter_x, *iter_y, *iter_z, 1.0); + Eigen::Vector2d normalized_projected_point = Eigen::Vector2d( + projected_point.x() / projected_point.z(), projected_point.y() / projected_point.z()); + + for (std::size_t i = 0; i < output_objs.size(); ++i) { + auto & feature_obj = output_objs.at(i); + const auto & check_roi = feature_obj.feature.roi; + auto & cluster = clusters.at(i); + + if ( + check_roi.x_offset <= normalized_projected_point.x() && + check_roi.y_offset <= normalized_projected_point.y() && + check_roi.x_offset + check_roi.width >= normalized_projected_point.x() && + check_roi.y_offset + check_roi.height >= normalized_projected_point.y()) { + cluster.push_back(pcl::PointXYZ(*iter_orig_x, *iter_orig_y, *iter_orig_z)); + } + } + } + + // refine and update output_fused_objects_ + updateOutputFusedObjects( + output_objs, clusters, input_pointcloud_msg.header, input_roi_msg.header, tf_buffer_, + min_cluster_size_, cluster_2d_tolerance_, output_fused_objects_); +} + +bool RoiPointCloudFusionNode::out_of_scope(__attribute__((unused)) + const DetectedObjectWithFeature & obj) +{ + return false; +} +} // namespace image_projection_based_fusion + +#include +RCLCPP_COMPONENTS_REGISTER_NODE(image_projection_based_fusion::RoiPointCloudFusionNode) diff --git a/perception/image_projection_based_fusion/src/utils/utils.cpp b/perception/image_projection_based_fusion/src/utils/utils.cpp index 670c5596001fb..76cd1e3c61e82 100644 --- a/perception/image_projection_based_fusion/src/utils/utils.cpp +++ b/perception/image_projection_based_fusion/src/utils/utils.cpp @@ -13,7 +13,6 @@ // limitations under the License. 
#include "image_projection_based_fusion/utils/utils.hpp" - namespace image_projection_based_fusion { @@ -39,4 +38,197 @@ Eigen::Affine3d transformToEigen(const geometry_msgs::msg::Transform & t) return a; } +void convertCluster2FeatureObject( + const std_msgs::msg::Header & header, const PointCloud & cluster, + DetectedObjectWithFeature & feature_obj) +{ + PointCloud2 ros_cluster; + pcl::toROSMsg(cluster, ros_cluster); + ros_cluster.header = header; + feature_obj.feature.cluster = ros_cluster; + feature_obj.object.kinematics.pose_with_covariance.pose.position = getCentroid(ros_cluster); + feature_obj.object.existence_probability = 1.0f; +} + +PointCloud closest_cluster( + PointCloud & cluster, const double cluster_2d_tolerance, const int min_cluster_size, + const pcl::PointXYZ & center) +{ + // sort point by distance to camera origin + + auto func = [center](const pcl::PointXYZ & p1, const pcl::PointXYZ & p2) { + return tier4_autoware_utils::calcDistance2d(center, p1) < + tier4_autoware_utils::calcDistance2d(center, p2); + }; + std::sort(cluster.begin(), cluster.end(), func); + PointCloud out_cluster; + for (auto & point : cluster) { + if (out_cluster.empty()) { + out_cluster.push_back(point); + continue; + } + if (tier4_autoware_utils::calcDistance2d(out_cluster.back(), point) < cluster_2d_tolerance) { + out_cluster.push_back(point); + continue; + } + if (out_cluster.size() >= static_cast(min_cluster_size)) { + return out_cluster; + } + out_cluster.clear(); + out_cluster.push_back(point); + } + return out_cluster; +} + +void updateOutputFusedObjects( + std::vector & output_objs, const std::vector & clusters, + const std_msgs::msg::Header & in_cloud_header, const std_msgs::msg::Header & in_roi_header, + const tf2_ros::Buffer & tf_buffer, const int min_cluster_size, const float cluster_2d_tolerance, + std::vector & output_fused_objects) +{ + if (output_objs.size() != clusters.size()) { + return; + } + Eigen::Vector3d orig_camera_frame, orig_point_frame; + Eigen::Affine3d camera2lidar_affine; + orig_camera_frame << 0.0, 0.0, 0.0; + { + const auto transform_stamped_optional = getTransformStamped( + tf_buffer, in_cloud_header.frame_id, in_roi_header.frame_id, in_roi_header.stamp); + if (!transform_stamped_optional) { + return; + } + camera2lidar_affine = transformToEigen(transform_stamped_optional.value().transform); + } + orig_point_frame = camera2lidar_affine * orig_camera_frame; + pcl::PointXYZ camera_orig_point_frame = + pcl::PointXYZ(orig_point_frame.x(), orig_point_frame.y(), orig_point_frame.z()); + + for (std::size_t i = 0; i < output_objs.size(); ++i) { + PointCloud cluster = clusters.at(i); + auto & feature_obj = output_objs.at(i); + if (cluster.points.size() < std::size_t(min_cluster_size)) { + continue; + } + + // TODO(badai-nguyen): change to interface to select refine criteria like closest, largest + // to output refine cluster and centroid + auto refine_cluster = + closest_cluster(cluster, cluster_2d_tolerance, min_cluster_size, camera_orig_point_frame); + if (refine_cluster.points.size() < std::size_t(min_cluster_size)) { + continue; + } + + refine_cluster.width = refine_cluster.points.size(); + refine_cluster.height = 1; + refine_cluster.is_dense = false; + // convert cluster to object + convertCluster2FeatureObject(in_cloud_header, refine_cluster, feature_obj); + output_fused_objects.push_back(feature_obj); + } +} + +void addShapeAndKinematic( + const pcl::PointCloud & cluster, + tier4_perception_msgs::msg::DetectedObjectWithFeature & feature_obj) +{ + if (cluster.empty()) { 
+    return;
+  }
+  pcl::PointXYZ centroid = pcl::PointXYZ(0.0, 0.0, 0.0);
+  float max_z = -1e6;
+  float min_z = 1e6;
+  for (const auto & point : cluster) {
+    centroid.x += point.x;
+    centroid.y += point.y;
+    centroid.z += point.z;
+    max_z = max_z < point.z ? point.z : max_z;
+    min_z = min_z > point.z ? point.z : min_z;
+  }
+  centroid.x = centroid.x / static_cast<float>(cluster.size());
+  centroid.y = centroid.y / static_cast<float>(cluster.size());
+  centroid.z = centroid.z / static_cast<float>(cluster.size());
+
+  std::vector<cv::Point> cluster2d;
+  std::vector<cv::Point> cluster2d_convex;
+
+  for (size_t i = 0; i < cluster.size(); ++i) {
+    cluster2d.push_back(
+      cv::Point((cluster.at(i).x - centroid.x) * 1000.0, (cluster.at(i).y - centroid.y) * 1000.0));
+  }
+  cv::convexHull(cluster2d, cluster2d_convex);
+  if (cluster2d_convex.empty()) {
+    return;
+  }
+  pcl::PointXYZ polygon_centroid = pcl::PointXYZ(0.0, 0.0, 0.0);
+  for (size_t i = 0; i < cluster2d_convex.size(); ++i) {
+    polygon_centroid.x += static_cast<double>(cluster2d_convex.at(i).x) / 1000.0;
+    polygon_centroid.y += static_cast<double>(cluster2d_convex.at(i).y) / 1000.0;
+  }
+  polygon_centroid.x = polygon_centroid.x / static_cast<double>(cluster2d_convex.size());
+  polygon_centroid.y = polygon_centroid.y / static_cast<double>(cluster2d_convex.size());
+
+  autoware_auto_perception_msgs::msg::Shape shape;
+  for (size_t i = 0; i < cluster2d_convex.size(); ++i) {
+    geometry_msgs::msg::Point32 point;
+    point.x = cluster2d_convex.at(i).x / 1000.0;
+    point.y = cluster2d_convex.at(i).y / 1000.0;
+    point.z = 0.0;
+    shape.footprint.points.push_back(point);
+  }
+  shape.type = autoware_auto_perception_msgs::msg::Shape::POLYGON;
+  constexpr float eps = 0.01;
+  shape.dimensions.x = 0;
+  shape.dimensions.y = 0;
+  shape.dimensions.z = std::max((max_z - min_z), eps);
+  feature_obj.object.shape = shape;
+  feature_obj.object.kinematics.pose_with_covariance.pose.position.x =
+    centroid.x + polygon_centroid.x;
+  feature_obj.object.kinematics.pose_with_covariance.pose.position.y =
+    centroid.y + polygon_centroid.y;
+  feature_obj.object.kinematics.pose_with_covariance.pose.position.z =
+    min_z + shape.dimensions.z * 0.5;
+  feature_obj.object.existence_probability = 1.0;
+  feature_obj.object.kinematics.pose_with_covariance.pose.orientation.x = 0;
+  feature_obj.object.kinematics.pose_with_covariance.pose.orientation.y = 0;
+  feature_obj.object.kinematics.pose_with_covariance.pose.orientation.z = 0;
+  feature_obj.object.kinematics.pose_with_covariance.pose.orientation.w = 1;
+}
+
+geometry_msgs::msg::Point getCentroid(const sensor_msgs::msg::PointCloud2 & pointcloud)
+{
+  geometry_msgs::msg::Point centroid;
+  centroid.x = 0.0f;
+  centroid.y = 0.0f;
+  centroid.z = 0.0f;
+  for (sensor_msgs::PointCloud2ConstIterator<float> iter_x(pointcloud, "x"),
+       iter_y(pointcloud, "y"), iter_z(pointcloud, "z");
+       iter_x != iter_x.end(); ++iter_x, ++iter_y, ++iter_z) {
+    centroid.x += *iter_x;
+    centroid.y += *iter_y;
+    centroid.z += *iter_z;
+  }
+  const size_t size = pointcloud.width * pointcloud.height;
+  centroid.x = centroid.x / static_cast<float>(size);
+  centroid.y = centroid.y / static_cast<float>(size);
+  centroid.z = centroid.z / static_cast<float>(size);
+  return centroid;
+}
+
+pcl::PointXYZ getClosestPoint(const pcl::PointCloud<pcl::PointXYZ> & cluster)
+{
+  pcl::PointXYZ closest_point;
+  double min_dist = 1e6;
+  pcl::PointXYZ orig_point = pcl::PointXYZ(0.0, 0.0, 0.0);
+  for (std::size_t i = 0; i < cluster.points.size(); ++i) {
+    pcl::PointXYZ point = cluster.points.at(i);
+    double dist_closest_point = tier4_autoware_utils::calcDistance2d(point, orig_point);
+    if (min_dist > dist_closest_point) {
+      min_dist = dist_closest_point;
+      closest_point = pcl::PointXYZ(point.x, point.y, point.z);
+    }
+  }
+  return closest_point;
+}
+
 }  // namespace image_projection_based_fusion
diff --git a/planning/obstacle_cruise_planner/config/obstacle_cruise_planner.param.yaml b/planning/obstacle_cruise_planner/config/obstacle_cruise_planner.param.yaml
index 08ebea4284bf1..f3f1932c44c43 100644
--- a/planning/obstacle_cruise_planner/config/obstacle_cruise_planner.param.yaml
+++ b/planning/obstacle_cruise_planner/config/obstacle_cruise_planner.param.yaml
@@ -20,7 +20,11 @@
       nearest_dist_deviation_threshold: 3.0 # [m] for finding nearest index
       nearest_yaw_deviation_threshold: 1.57 # [rad] for finding nearest index
       min_behavior_stop_margin: 3.0 # [m]
-      suppress_sudden_obstacle_stop: false
+      stop_on_curve:
+        enable_approaching: true # false
+        additional_safe_distance_margin: 0.0 # [m]
+        min_safe_distance_margin: 3.0 # [m]
+      suppress_sudden_obstacle_stop: true
 
       stop_obstacle_type:
         unknown: true
@@ -54,7 +58,7 @@
         pedestrian: false
 
       slow_down_obstacle_type:
-        unknown: true
+        unknown: false
         car: true
         truck: true
         bus: true
diff --git a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/common_structs.hpp b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/common_structs.hpp
index 12ebadf770996..faa7cfcd8e96a 100644
--- a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/common_structs.hpp
+++ b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/common_structs.hpp
@@ -104,12 +104,15 @@ struct StopObstacle : public TargetObstacleInterface
 {
   StopObstacle(
     const std::string & arg_uuid, const rclcpp::Time & arg_stamp,
-    const geometry_msgs::msg::Pose & arg_pose, const double arg_lon_velocity,
-    const double arg_lat_velocity, const geometry_msgs::msg::Point arg_collision_point)
+    const geometry_msgs::msg::Pose & arg_pose, const Shape & arg_shape,
+    const double arg_lon_velocity, const double arg_lat_velocity,
+    const geometry_msgs::msg::Point arg_collision_point)
   : TargetObstacleInterface(arg_uuid, arg_stamp, arg_pose, arg_lon_velocity, arg_lat_velocity),
+    shape(arg_shape),
     collision_point(arg_collision_point)
   {
   }
+  Shape shape;
   geometry_msgs::msg::Point collision_point;
 };
diff --git a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/node.hpp b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/node.hpp
index ac6684d163aea..00afc11985d72 100644
--- a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/node.hpp
+++ b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/node.hpp
@@ -100,6 +100,9 @@ class ObstacleCruisePlannerNode : public rclcpp::Node
   bool enable_debug_info_;
   bool enable_calculation_time_info_;
   double min_behavior_stop_margin_;
+  bool enable_approaching_on_curve_;
+  double additional_safe_distance_margin_on_curve_;
+  double min_safe_distance_margin_on_curve_;
   bool suppress_sudden_obstacle_stop_;
 
   std::vector stop_obstacle_types_;
diff --git a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/planner_interface.hpp b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/planner_interface.hpp
index c3fa364da269e..879f2e0b8b43d 100644
--- a/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/planner_interface.hpp
+++ b/planning/obstacle_cruise_planner/include/obstacle_cruise_planner/planner_interface.hpp
@@ -53,11 +53,16 @@ class PlannerInterface
   void setParam(
     const bool enable_debug_info, const bool enable_calculation_time_info,
-    const double min_behavior_stop_margin, const bool suppress_sudden_obstacle_stop)
+    const double min_behavior_stop_margin, const bool enable_approaching_on_curve,
+    const double additional_safe_distance_margin_on_curve,
+    const double min_safe_distance_margin_on_curve, const bool suppress_sudden_obstacle_stop)
   {
     enable_debug_info_ = enable_debug_info;
     enable_calculation_time_info_ = enable_calculation_time_info;
     min_behavior_stop_margin_ = min_behavior_stop_margin;
+    enable_approaching_on_curve_ = enable_approaching_on_curve;
+    additional_safe_distance_margin_on_curve_ = additional_safe_distance_margin_on_curve;
+    min_safe_distance_margin_on_curve_ = min_safe_distance_margin_on_curve;
     suppress_sudden_obstacle_stop_ = suppress_sudden_obstacle_stop;
   }
@@ -102,6 +107,9 @@ class PlannerInterface
   bool enable_calculation_time_info_{false};
   LongitudinalInfo longitudinal_info_;
   double min_behavior_stop_margin_;
+  bool enable_approaching_on_curve_;
+  double additional_safe_distance_margin_on_curve_;
+  double min_safe_distance_margin_on_curve_;
   bool suppress_sudden_obstacle_stop_;
 
   // stop watch
@@ -194,6 +202,8 @@ class PlannerInterface
     std::optional<geometry_msgs::msg::Point> start_point{std::nullopt};
     std::optional<geometry_msgs::msg::Point> end_point{std::nullopt};
   };
+  double calculateMarginFromObstacleOnCurve(
+    const PlannerData & planner_data, const StopObstacle & stop_obstacle) const;
   double calculateSlowDownVelocity(
     const SlowDownObstacle & obstacle, const std::optional & prev_output) const;
   std::optional> calculateDistanceToSlowDownWithConstraints(
diff --git a/planning/obstacle_cruise_planner/src/node.cpp b/planning/obstacle_cruise_planner/src/node.cpp
index 266e184a06a08..7a5a4ef93d4a5 100644
--- a/planning/obstacle_cruise_planner/src/node.cpp
+++ b/planning/obstacle_cruise_planner/src/node.cpp
@@ -397,11 +397,18 @@ ObstacleCruisePlannerNode::ObstacleCruisePlannerNode(const rclcpp::NodeOptions &
   }
 
   min_behavior_stop_margin_ = declare_parameter<double>("common.min_behavior_stop_margin");
+  additional_safe_distance_margin_on_curve_ =
+    declare_parameter<double>("common.stop_on_curve.additional_safe_distance_margin");
+  enable_approaching_on_curve_ =
+    declare_parameter<bool>("common.stop_on_curve.enable_approaching");
+  min_safe_distance_margin_on_curve_ =
+    declare_parameter<double>("common.stop_on_curve.min_safe_distance_margin");
   suppress_sudden_obstacle_stop_ =
     declare_parameter<bool>("common.suppress_sudden_obstacle_stop");
   planner_ptr_->setParam(
     enable_debug_info_, enable_calculation_time_info_, min_behavior_stop_margin_,
-    suppress_sudden_obstacle_stop_);
+    enable_approaching_on_curve_, additional_safe_distance_margin_on_curve_,
+    min_safe_distance_margin_on_curve_, suppress_sudden_obstacle_stop_);
   }
 
   {  // stop/cruise/slow down obstacle type
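The three stop_on_curve values declared above only bound the stop margin that the curve logic computes later in planner_interface.cpp: the curve-derived margin (which already contains additional_safe_distance_margin) is clamped between min_safe_distance_margin and the nominal safe_distance_margin. A standalone sketch of that clamping with made-up numbers (the function name clampCurveStopMargin is ours, not part of the planner):

  #include <algorithm>
  #include <cassert>

  // Clamp a curve-derived stop margin into [min_margin_on_curve, nominal_margin].
  double clampCurveStopMargin(
    const double curve_margin, const double min_margin_on_curve, const double nominal_margin)
  {
    return std::min(nominal_margin, std::max(min_margin_on_curve, curve_margin));
  }

  int main()
  {
    // a 1.2 m geometric margin is raised to a 3.0 m minimum,
    // while a 10.0 m one is capped at a 6.0 m nominal margin (all values made up)
    assert(clampCurveStopMargin(1.2, 3.0, 6.0) == 3.0);
    assert(clampCurveStopMargin(10.0, 3.0, 6.0) == 6.0);
    return 0;
  }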
@@ -438,9 +445,20 @@ rcl_interfaces::msg::SetParametersResult ObstacleCruisePlannerNode::onParam(
     parameters, "common.enable_debug_info", enable_debug_info_);
   tier4_autoware_utils::updateParam<bool>(
     parameters, "common.enable_calculation_time_info", enable_calculation_time_info_);
+
+  tier4_autoware_utils::updateParam<bool>(
+    parameters, "common.stop_on_curve.enable_approaching", enable_approaching_on_curve_);
+  tier4_autoware_utils::updateParam<double>(
+    parameters, "common.stop_on_curve.additional_safe_distance_margin",
+    additional_safe_distance_margin_on_curve_);
+  tier4_autoware_utils::updateParam<double>(
+    parameters, "common.stop_on_curve.min_safe_distance_margin",
+    min_safe_distance_margin_on_curve_);
+
   planner_ptr_->setParam(
     enable_debug_info_, enable_calculation_time_info_, min_behavior_stop_margin_,
-    suppress_sudden_obstacle_stop_);
+    enable_approaching_on_curve_, additional_safe_distance_margin_on_curve_,
+    min_safe_distance_margin_on_curve_, suppress_sudden_obstacle_stop_);
 
   tier4_autoware_utils::updateParam<bool>(
     parameters, "common.enable_slow_down_planning", enable_slow_down_planning_);
@@ -911,7 +929,7 @@ std::optional<StopObstacle> ObstacleCruisePlannerNode::createStopObstacle(
   }
   const auto [tangent_vel, normal_vel] = projectObstacleVelocityToTrajectory(traj_points, obstacle);
 
-  return StopObstacle{obstacle.uuid, obstacle.stamp, obstacle.pose,
+  return StopObstacle{obstacle.uuid, obstacle.stamp, obstacle.pose, obstacle.shape,
                       tangent_vel, normal_vel, *collision_point};
 }
 
diff --git a/planning/obstacle_cruise_planner/src/planner_interface.cpp b/planning/obstacle_cruise_planner/src/planner_interface.cpp
index 03cae6e6f9d88..4656988750e65 100644
--- a/planning/obstacle_cruise_planner/src/planner_interface.cpp
+++ b/planning/obstacle_cruise_planner/src/planner_interface.cpp
@@ -15,6 +15,10 @@
 #include "obstacle_cruise_planner/planner_interface.hpp"
 
 #include "motion_utils/distance/distance.hpp"
+#include "motion_utils/marker/marker_helper.hpp"
+#include "motion_utils/resample/resample.hpp"
+#include "motion_utils/trajectory/tmp_conversion.hpp"
+#include "motion_utils/trajectory/trajectory.hpp"
 #include "signal_processing/lowpass_filter_1d.hpp"
 
 namespace
@@ -204,6 +208,19 @@ double calcDecelerationVelocityFromDistanceToTarget(
   }
   return current_velocity;
 }
+
+std::vector<TrajectoryPoint> resampleTrajectoryPoints(
+  const std::vector<TrajectoryPoint> & traj_points, const double interval)
+{
+  const auto traj = motion_utils::convertToTrajectory(traj_points);
+  const auto resampled_traj = motion_utils::resampleTrajectory(traj, interval);
+  return motion_utils::convertToTrajectoryPointArray(resampled_traj);
+}
+
+tier4_autoware_utils::Point2d convertPoint(const geometry_msgs::msg::Point & p)
+{
+  return tier4_autoware_utils::Point2d{p.x, p.y};
+}
 }  // namespace
 
 std::vector<TrajectoryPoint> PlannerInterface::generateStopTrajectory(
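The resampleTrajectoryPoints helper added above is a thin wrapper that round-trips through the Trajectory message so motion_utils' arc-length resampling can be reused on a plain point vector; calculateMarginFromObstacleOnCurve later calls it with a 0.5 m interval before running its collision check. A minimal sketch of the same pattern (assuming an Autoware workspace where motion_utils and autoware_auto_planning_msgs are available; the name resampleByInterval is ours):

  #include "motion_utils/resample/resample.hpp"
  #include "motion_utils/trajectory/tmp_conversion.hpp"

  #include <autoware_auto_planning_msgs/msg/trajectory_point.hpp>

  #include <vector>

  using autoware_auto_planning_msgs::msg::TrajectoryPoint;

  // Resample a trajectory-point vector at a fixed arc-length interval [m].
  std::vector<TrajectoryPoint> resampleByInterval(
    const std::vector<TrajectoryPoint> & traj_points, const double interval)
  {
    const auto traj = motion_utils::convertToTrajectory(traj_points);         // points -> message
    const auto resampled = motion_utils::resampleTrajectory(traj, interval);  // arc-length resampling
    return motion_utils::convertToTrajectoryPointArray(resampled);            // message -> points
  }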
@@ -259,16 +276,19 @@ std::vector<TrajectoryPoint> PlannerInterface::generateStopTrajectory(
     planner_data.traj_points, planner_data.ego_pose.position, ego_segment_idx, 0);
   const double dist_to_ego = -negative_dist_to_ego;
 
+  const double margin_from_obstacle =
+    calculateMarginFromObstacleOnCurve(planner_data, *closest_stop_obstacle);
+
   // If behavior stop point is ahead of the closest_obstacle_stop point within a certain margin
   // we set closest_obstacle_stop_distance to closest_behavior_stop_distance
-  const double margin_from_obstacle = [&]() {
+  const double margin_from_obstacle_considering_behavior_module = [&]() {
     const size_t nearest_segment_idx =
       findEgoSegmentIndex(planner_data.traj_points, planner_data.ego_pose);
     const auto closest_behavior_stop_idx =
       motion_utils::searchZeroVelocityIndex(planner_data.traj_points, nearest_segment_idx + 1);
 
     if (!closest_behavior_stop_idx) {
-      return longitudinal_info_.safe_distance_margin;
+      return margin_from_obstacle;
     }
 
     const double closest_behavior_stop_dist_from_ego = motion_utils::calcSignedArcLength(
@@ -282,29 +302,28 @@ std::vector<TrajectoryPoint> PlannerInterface::generateStopTrajectory(
                                            abs_ego_offset;
       const double stop_dist_diff =
         closest_behavior_stop_dist_from_ego - closest_obstacle_stop_dist_from_ego;
-      if (stop_dist_diff < longitudinal_info_.safe_distance_margin) {
+      if (stop_dist_diff < margin_from_obstacle) {
         // Use terminal margin (terminal_safe_distance_margin) for obstacle stop
         return longitudinal_info_.terminal_safe_distance_margin;
       }
     } else {
-      const double closest_obstacle_stop_dist_from_ego = closest_obstacle_dist - dist_to_ego -
-                                                         longitudinal_info_.safe_distance_margin -
-                                                         abs_ego_offset;
+      const double closest_obstacle_stop_dist_from_ego =
+        closest_obstacle_dist - dist_to_ego - margin_from_obstacle - abs_ego_offset;
       const double stop_dist_diff =
         closest_behavior_stop_dist_from_ego - closest_obstacle_stop_dist_from_ego;
-      if (0.0 < stop_dist_diff && stop_dist_diff < longitudinal_info_.safe_distance_margin) {
+      if (0.0 < stop_dist_diff && stop_dist_diff < margin_from_obstacle) {
         // Use shorter margin (min_behavior_stop_margin) for obstacle stop
         return min_behavior_stop_margin_;
       }
     }
-    return longitudinal_info_.safe_distance_margin;
+    return margin_from_obstacle;
   }();
 
   const auto [stop_margin_from_obstacle, will_collide_with_obstacle] = [&]() {
     // Check feasibility to stop
     if (suppress_sudden_obstacle_stop_) {
       const double closest_obstacle_stop_dist =
-        closest_obstacle_dist - margin_from_obstacle - abs_ego_offset;
+        closest_obstacle_dist - margin_from_obstacle_considering_behavior_module - abs_ego_offset;
 
       // Calculate feasible stop margin (Check the feasibility)
       const double feasible_stop_dist = calcMinimumDistanceToStop(
@@ -314,11 +333,12 @@ std::vector<TrajectoryPoint> PlannerInterface::generateStopTrajectory(
 
       if (closest_obstacle_stop_dist < feasible_stop_dist) {
         const auto feasible_margin_from_obstacle =
-          margin_from_obstacle - (feasible_stop_dist - closest_obstacle_stop_dist);
+          margin_from_obstacle_considering_behavior_module -
+          (feasible_stop_dist - closest_obstacle_stop_dist);
         return std::make_pair(feasible_margin_from_obstacle, true);
       }
     }
-    return std::make_pair(margin_from_obstacle, false);
+    return std::make_pair(margin_from_obstacle_considering_behavior_module, false);
   }();
 
   // Generate Output Trajectory
@@ -395,6 +415,91 @@ std::vector<TrajectoryPoint> PlannerInterface::generateStopTrajectory(
   return output_traj_points;
 }
 
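The calculateMarginFromObstacleOnCurve function added below decides, pose by pose, whether the obstacle would be hit if the ego kept driving straight from that pose; the test reduces to a 2D distance between a forward segment and the obstacle footprint polygon, compared against half the vehicle width. A self-contained sketch of that geometric primitive, using plain Boost.Geometry models instead of the tier4_autoware_utils aliases and made-up coordinates:

  #include <boost/geometry.hpp>
  #include <boost/geometry/geometries/point_xy.hpp>
  #include <boost/geometry/geometries/polygon.hpp>
  #include <boost/geometry/geometries/segment.hpp>

  #include <iostream>

  namespace bg = boost::geometry;
  using Point2d = bg::model::d2::point_xy<double>;
  using Segment2d = bg::model::segment<Point2d>;
  using Polygon2d = bg::model::polygon<Point2d>;

  int main()
  {
    // straight segment from the ego position along its heading (+x here)
    const Segment2d ego_straight{Point2d{0.0, 0.0}, Point2d{10.0, 0.0}};

    // a 2 m x 2 m obstacle footprint centered at (8.0, 0.5)
    Polygon2d obstacle;
    bg::append(obstacle.outer(), Point2d{7.0, -0.5});
    bg::append(obstacle.outer(), Point2d{9.0, -0.5});
    bg::append(obstacle.outer(), Point2d{9.0, 1.5});
    bg::append(obstacle.outer(), Point2d{7.0, 1.5});
    bg::append(obstacle.outer(), Point2d{7.0, -0.5});  // close the ring
    bg::correct(obstacle);

    // 0.0 here, because the segment passes through the footprint;
    // the planner treats dist < vehicle_width / 2 as a collision
    const double dist = bg::distance(ego_straight, obstacle);
    std::cout << "distance: " << dist << std::endl;
    return 0;
  }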
+double PlannerInterface::calculateMarginFromObstacleOnCurve(
+  const PlannerData & planner_data, const StopObstacle & stop_obstacle) const
+{
+  if (!enable_approaching_on_curve_) {
+    return longitudinal_info_.safe_distance_margin;
+  }
+
+  const double abs_ego_offset = planner_data.is_driving_forward
+                                  ? std::abs(vehicle_info_.max_longitudinal_offset_m)
+                                  : std::abs(vehicle_info_.min_longitudinal_offset_m);
+
+  // calculate short trajectory points towards the obstacle
+  const size_t obj_segment_idx =
+    motion_utils::findNearestSegmentIndex(planner_data.traj_points, stop_obstacle.collision_point);
+  std::vector<TrajectoryPoint> short_traj_points{planner_data.traj_points.at(obj_segment_idx + 1)};
+  double sum_short_traj_length{0.0};
+  for (int i = obj_segment_idx; 0 <= i; --i) {
+    short_traj_points.push_back(planner_data.traj_points.at(i));
+
+    if (
+      1 < short_traj_points.size() &&
+      longitudinal_info_.safe_distance_margin + abs_ego_offset < sum_short_traj_length) {
+      break;
+    }
+    sum_short_traj_length += tier4_autoware_utils::calcDistance2d(
+      planner_data.traj_points.at(i), planner_data.traj_points.at(i + 1));
+  }
+  std::reverse(short_traj_points.begin(), short_traj_points.end());
+  if (short_traj_points.size() < 2) {
+    return longitudinal_info_.safe_distance_margin;
+  }
+
+  // calculate the collision index between a straight line from the ego pose and the object
+  const auto calculate_distance_from_straight_ego_path =
+    [&](const auto & ego_pose, const auto & object_polygon) {
+      const auto forward_ego_pose = tier4_autoware_utils::calcOffsetPose(
+        ego_pose, longitudinal_info_.safe_distance_margin + 3.0, 0.0, 0.0);
+      const auto ego_straight_segment = tier4_autoware_utils::Segment2d{
+        convertPoint(ego_pose.position), convertPoint(forward_ego_pose.position)};
+      return boost::geometry::distance(ego_straight_segment, object_polygon);
+    };
+  const auto resampled_short_traj_points = resampleTrajectoryPoints(short_traj_points, 0.5);
+  const auto object_polygon =
+    tier4_autoware_utils::toPolygon2d(stop_obstacle.pose, stop_obstacle.shape);
+  const auto collision_idx = [&]() -> std::optional<size_t> {
+    for (size_t i = 0; i < resampled_short_traj_points.size(); ++i) {
+      const double dist_to_obj = calculate_distance_from_straight_ego_path(
+        resampled_short_traj_points.at(i).pose, object_polygon);
+      if (dist_to_obj < vehicle_info_.vehicle_width_m / 2.0) {
+        return i;
+      }
+    }
+    return std::nullopt;
+  }();
+  if (!collision_idx) {
+    return min_safe_distance_margin_on_curve_;
+  }
+  if (*collision_idx == 0) {
+    return longitudinal_info_.safe_distance_margin;
+  }
+
+  // calculate the margin from the obstacle
+  const double partial_segment_length = [&]() {
+    const double collision_segment_length = tier4_autoware_utils::calcDistance2d(
+      resampled_short_traj_points.at(*collision_idx - 1),
+      resampled_short_traj_points.at(*collision_idx));
+    const double prev_dist = calculate_distance_from_straight_ego_path(
+      resampled_short_traj_points.at(*collision_idx - 1).pose, object_polygon);
+    const double next_dist = calculate_distance_from_straight_ego_path(
+      resampled_short_traj_points.at(*collision_idx).pose, object_polygon);
+    return (next_dist - vehicle_info_.vehicle_width_m / 2.0) / (next_dist - prev_dist) *
+           collision_segment_length;
+  }();
+
+  const double short_margin_from_obstacle =
+    partial_segment_length +
+    motion_utils::calcSignedArcLength(
+      resampled_short_traj_points, *collision_idx, stop_obstacle.collision_point) -
+    abs_ego_offset + additional_safe_distance_margin_on_curve_;
+
+  return std::min(
+    longitudinal_info_.safe_distance_margin,
+    std::max(min_safe_distance_margin_on_curve_, short_margin_from_obstacle));
+}
+
 double PlannerInterface::calcDistanceToCollisionPoint(
   const PlannerData & planner_data, const geometry_msgs::msg::Point & collision_point)
 {