diff --git a/common/tier4_autoware_utils/include/tier4_autoware_utils/geometry/geometry.hpp b/common/tier4_autoware_utils/include/tier4_autoware_utils/geometry/geometry.hpp index 84ccf05d5ba4f..3a7ae22137b1f 100644 --- a/common/tier4_autoware_utils/include/tier4_autoware_utils/geometry/geometry.hpp +++ b/common/tier4_autoware_utils/include/tier4_autoware_utils/geometry/geometry.hpp @@ -343,6 +343,27 @@ inline geometry_msgs::msg::Pose calcOffsetPose( tf2::toMsg(tf_pose * tf_offset, pose); return pose; } + +/** + * @brief Calculate offset pose. The offset values are defined in the local coordinate of the input + * pose. + */ +inline geometry_msgs::msg::Pose calcOffsetPose( + const geometry_msgs::msg::Pose & p, const double x, const double y, const double z, + const double yaw) +{ + geometry_msgs::msg::Pose pose; + geometry_msgs::msg::Transform transform; + transform.translation = createTranslation(x, y, z); + transform.rotation = createQuaternionFromYaw(yaw); + tf2::Transform tf_pose; + tf2::Transform tf_offset; + tf2::fromMsg(transform, tf_offset); + tf2::fromMsg(p, tf_pose); + tf2::toMsg(tf_pose * tf_offset, pose); + return pose; +} + } // namespace tier4_autoware_utils #endif // TIER4_AUTOWARE_UTILS__GEOMETRY__GEOMETRY_HPP_ diff --git a/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/polling_subscriber.hpp b/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/polling_subscriber.hpp new file mode 100644 index 0000000000000..2cc993abd859a --- /dev/null +++ b/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/polling_subscriber.hpp @@ -0,0 +1,253 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#ifndef TIER4_AUTOWARE_UTILS__ROS__POLLING_SUBSCRIBER_HPP_ +#define TIER4_AUTOWARE_UTILS__ROS__POLLING_SUBSCRIBER_HPP_ + +#include + +#include +#include +#include +#include + +namespace autoware::universe_utils +{ + +/** + * @brief Creates a SensorDataQoS profile with a single depth. + * @return rclcpp::SensorDataQoS The QoS profile with depth set to 1. + */ +inline rclcpp::SensorDataQoS SingleDepthSensorQoS() +{ + rclcpp::SensorDataQoS qos; + qos.get_rmw_qos_profile().depth = 1; + return qos; +} + +namespace polling_policy +{ + +/** + * @brief Polling policy that keeps the latest received message. + * + * This policy retains the latest received message and provides it when requested. If a new message + * is received, it overwrites the previously stored message. + * + * @tparam MessageT The message type. + */ +template +class Latest +{ +private: + typename MessageT::ConstSharedPtr data_{nullptr}; ///< Data pointer to store the latest data + +protected: + /** + * @brief Check the QoS settings for the subscription. + * + * @param qos The QoS profile to check. + * @throws std::invalid_argument If the QoS depth is not 1. 
+ */ + void checkQoS(const rclcpp::QoS & qos) + { + if (qos.get_rmw_qos_profile().depth > 1) { + throw std::invalid_argument( + "InterProcessPollingSubscriber will be used with depth > 1, which may cause inefficient " + "serialization while updateLatestData()"); + } + } + +public: + /** + * @brief Retrieve the latest data. If no new data has been received, the previously received data + * + * @return typename MessageT::ConstSharedPtr The latest data. + */ + typename MessageT::ConstSharedPtr takeData(); +}; + +/** + * @brief Polling policy that keeps the newest received message. + * + * @tparam MessageT The message type. + */ +template +class Newest +{ +protected: + /** + * @brief Check the QoS settings for the subscription. + * + * @param qos The QoS profile to check. + * @throws std::invalid_argument If the QoS depth is not 1. + */ + void checkQoS(const rclcpp::QoS & qos) + { + if (qos.get_rmw_qos_profile().depth > 1) { + throw std::invalid_argument( + "InterProcessPollingSubscriber will be used with depth > 1, which may cause inefficient " + "serialization while updateLatestData()"); + } + } + +public: + /** + * @brief Retrieve the newest data. If no new data has been received, nullptr is returned. + * + * @return typename MessageT::ConstSharedPtr The newest data. + */ + typename MessageT::ConstSharedPtr takeData(); +}; + +/** + * @brief Polling policy that keeps all received messages. + * + * @tparam MessageT The message type. + */ +template +class All +{ +protected: + /** + * @brief Check the QoS settings for the subscription. + * + * @param qos The QoS profile to check. + */ + void checkQoS(const rclcpp::QoS &) {} + +public: + /** + * @brief Retrieve all data. + * + * @return std::vector The list of all received data. + */ + std::vector takeData(); +}; + +} // namespace polling_policy + +/** + * @brief Subscriber class that uses a specified polling policy. + * + * @tparam MessageT The message type. + * @tparam PollingPolicy The polling policy to use. + */ +template class PollingPolicy = polling_policy::Latest> +class InterProcessPollingSubscriber : public PollingPolicy +{ + friend PollingPolicy; + +private: + typename rclcpp::Subscription::SharedPtr subscriber_; ///< Subscription object + +public: + using SharedPtr = std::shared_ptr>; + + /** + * @brief Construct a new InterProcessPollingSubscriber object. + * + * @param node The node to attach the subscriber to. + * @param topic_name The topic name to subscribe to. + * @param qos The QoS profile to use for the subscription. + */ + explicit InterProcessPollingSubscriber( + rclcpp::Node * node, const std::string & topic_name, const rclcpp::QoS & qos = rclcpp::QoS{1}) + { + this->checkQoS(qos); + + auto noexec_callback_group = + node->create_callback_group(rclcpp::CallbackGroupType::MutuallyExclusive, false); + + auto noexec_subscription_options = rclcpp::SubscriptionOptions(); + noexec_subscription_options.callback_group = noexec_callback_group; + + subscriber_ = node->create_subscription( + topic_name, qos, + [node]([[maybe_unused]] const typename MessageT::ConstSharedPtr msg) { assert(false); }, + noexec_subscription_options); + } + + /** + * @brief Create a subscription. + * + * @param node The node to attach the subscriber to. + * @param topic_name The topic name to subscribe to. + * @param qos The QoS profile to use for the subscription. + * @return SharedPtr The created subscription. 
+ */ + static SharedPtr create_subscription( + rclcpp::Node * node, const std::string & topic_name, const rclcpp::QoS & qos = rclcpp::QoS{1}) + { + return std::make_shared>( + node, topic_name, qos); + } + + typename rclcpp::Subscription::SharedPtr subscriber() { return subscriber_; } +}; + +namespace polling_policy +{ + +template +typename MessageT::ConstSharedPtr Latest::takeData() +{ + auto & subscriber = + static_cast *>(this)->subscriber_; + auto new_data = std::make_shared(); + rclcpp::MessageInfo message_info; + const bool success = subscriber->take(*new_data, message_info); + if (success) { + data_ = new_data; + } + + return data_; +} + +template +typename MessageT::ConstSharedPtr Newest::takeData() +{ + auto & subscriber = + static_cast *>(this)->subscriber_; + auto new_data = std::make_shared(); + rclcpp::MessageInfo message_info; + const bool success = subscriber->take(*new_data, message_info); + if (success) { + return new_data; + } + return nullptr; +} + +template +std::vector All::takeData() +{ + auto & subscriber = + static_cast *>(this)->subscriber_; + std::vector data; + rclcpp::MessageInfo message_info; + for (;;) { + auto datum = std::make_shared(); + if (subscriber->take(*datum, message_info)) { + data.push_back(datum); + } else { + break; + } + } + return data; +} + +} // namespace polling_policy + +} // namespace autoware::universe_utils + +#endif // TIER4_AUTOWARE_UTILS__ROS__POLLING_SUBSCRIBER_HPP_ diff --git a/common/tier4_autoware_utils/include/tier4_autoware_utils/tier4_autoware_utils.hpp b/common/tier4_autoware_utils/include/tier4_autoware_utils/tier4_autoware_utils.hpp index 24c679424797d..428ad1b97bf17 100644 --- a/common/tier4_autoware_utils/include/tier4_autoware_utils/tier4_autoware_utils.hpp +++ b/common/tier4_autoware_utils/include/tier4_autoware_utils/tier4_autoware_utils.hpp @@ -27,6 +27,7 @@ #include "tier4_autoware_utils/ros/debug_publisher.hpp" #include "tier4_autoware_utils/ros/debug_traits.hpp" #include "tier4_autoware_utils/ros/marker_helper.hpp" +#include "tier4_autoware_utils/ros/polling_subscriber.hpp" #include "tier4_autoware_utils/ros/processing_time_publisher.hpp" #include "tier4_autoware_utils/ros/self_pose_listener.hpp" #include "tier4_autoware_utils/ros/transform_listener.hpp" diff --git a/control/autonomous_emergency_braking/CMakeLists.txt b/control/autonomous_emergency_braking/CMakeLists.txt deleted file mode 100644 index 6027aeac76333..0000000000000 --- a/control/autonomous_emergency_braking/CMakeLists.txt +++ /dev/null @@ -1,26 +0,0 @@ -cmake_minimum_required(VERSION 3.14) -project(autonomous_emergency_braking) - -find_package(autoware_cmake REQUIRED) -autoware_package() - -set(AEB_NODE ${PROJECT_NAME}_node) -ament_auto_add_library(${AEB_NODE} SHARED - src/node.cpp -) - -rclcpp_components_register_node(${AEB_NODE} - PLUGIN "autoware::motion::control::autonomous_emergency_braking::AEB" - EXECUTABLE ${PROJECT_NAME} -) - -if(BUILD_TESTING) - find_package(ament_lint_auto REQUIRED) - ament_lint_auto_find_test_dependencies() -endif() - -ament_auto_package( - INSTALL_TO_SHARE - launch - config -) diff --git a/control/autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml b/control/autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml deleted file mode 100644 index 09db0feb34597..0000000000000 --- a/control/autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml +++ /dev/null @@ -1,17 +0,0 @@ -/**: - ros__parameters: - use_predicted_trajectory: true - use_imu_path: true - 
voxel_grid_x: 0.05 - voxel_grid_y: 0.05 - voxel_grid_z: 100000.0 - min_generated_path_length: 0.5 - expand_width: 0.1 - longitudinal_offset: 2.0 - t_response: 1.0 - a_ego_min: -3.0 - a_obj_min: -1.0 - prediction_time_horizon: 1.5 - prediction_time_interval: 0.1 - collision_keeping_sec: 0.0 - aeb_hz: 10.0 diff --git a/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp b/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp deleted file mode 100644 index 5865530ec1e3b..0000000000000 --- a/control/autonomous_emergency_braking/include/autonomous_emergency_braking/node.hpp +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright 2023 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ -#define AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ - -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -namespace autoware::motion::control::autonomous_emergency_braking -{ - -using autoware_auto_planning_msgs::msg::Trajectory; -using autoware_auto_system_msgs::msg::AutowareState; -using autoware_auto_vehicle_msgs::msg::VelocityReport; -using nav_msgs::msg::Odometry; -using sensor_msgs::msg::Imu; -using sensor_msgs::msg::PointCloud2; -using PointCloud = pcl::PointCloud; -using diagnostic_updater::DiagnosticStatusWrapper; -using diagnostic_updater::Updater; -using tier4_autoware_utils::Point2d; -using tier4_autoware_utils::Polygon2d; -using vehicle_info_util::VehicleInfo; -using visualization_msgs::msg::Marker; -using visualization_msgs::msg::MarkerArray; -using Path = std::vector; -using Vector3 = geometry_msgs::msg::Vector3; - -struct ObjectData -{ - rclcpp::Time stamp; - geometry_msgs::msg::Point position; - double velocity{0.0}; - double rss{0.0}; - double distance_to_object{0.0}; -}; - -class CollisionDataKeeper -{ -public: - explicit CollisionDataKeeper(rclcpp::Clock::SharedPtr clock) { clock_ = clock; } - - void setTimeout(const double timeout_sec) { timeout_sec_ = timeout_sec; } - - bool checkExpired() - { - if (data_ && (clock_->now() - data_->stamp).seconds() > timeout_sec_) { - data_.reset(); - } - return (data_ == nullptr); - } - - void update(const ObjectData & data) { data_.reset(new ObjectData(data)); } - - ObjectData get() - { - if (data_) { - return *data_; - } else { - return ObjectData(); - } - } - -private: - std::unique_ptr data_; - double timeout_sec_{0.0}; - rclcpp::Clock::SharedPtr clock_; -}; - -class AEB : public rclcpp::Node -{ -public: - explicit AEB(const rclcpp::NodeOptions & node_options); - - // subscriber - rclcpp::Subscription::SharedPtr sub_point_cloud_; - rclcpp::Subscription::SharedPtr sub_velocity_; - rclcpp::Subscription::SharedPtr sub_imu_; - rclcpp::Subscription::SharedPtr sub_predicted_traj_; - rclcpp::Subscription::SharedPtr sub_autoware_state_; - - // publisher 
- rclcpp::Publisher::SharedPtr pub_obstacle_pointcloud_; - rclcpp::Publisher::SharedPtr debug_ego_path_publisher_; // debug - - // timer - rclcpp::TimerBase::SharedPtr timer_; - - // callback - void onPointCloud(const PointCloud2::ConstSharedPtr input_msg); - void onVelocity(const VelocityReport::ConstSharedPtr input_msg); - void onImu(const Imu::ConstSharedPtr input_msg); - void onTimer(); - void onPredictedTrajectory(const Trajectory::ConstSharedPtr input_msg); - void onAutowareState(const AutowareState::ConstSharedPtr input_msg); - - bool isDataReady(); - - // main function - void onCheckCollision(DiagnosticStatusWrapper & stat); - bool checkCollision(MarkerArray & debug_markers); - bool hasCollision( - const double current_v, const Path & ego_path, const std::vector & objects); - - void generateEgoPath( - const double curr_v, const double curr_w, Path & path, std::vector & polygons); - void generateEgoPath( - const Trajectory & predicted_traj, Path & path, std::vector & polygons); - void createObjectData( - const Path & ego_path, const std::vector & ego_polys, const rclcpp::Time & stamp, - std::vector & objects); - - void addMarker( - const rclcpp::Time & current_time, const Path & path, const std::vector & polygons, - const std::vector & objects, const double color_r, const double color_g, - const double color_b, const double color_a, const std::string & ns, - MarkerArray & debug_markers); - - void addCollisionMarker(const ObjectData & data, MarkerArray & debug_markers); - - PointCloud2::SharedPtr obstacle_ros_pointcloud_ptr_{nullptr}; - VelocityReport::ConstSharedPtr current_velocity_ptr_{nullptr}; - Vector3::SharedPtr angular_velocity_ptr_{nullptr}; - Trajectory::ConstSharedPtr predicted_traj_ptr_{nullptr}; - AutowareState::ConstSharedPtr autoware_state_{nullptr}; - - tf2_ros::Buffer tf_buffer_{get_clock()}; - tf2_ros::TransformListener tf_listener_{tf_buffer_}; - - // vehicle info - VehicleInfo vehicle_info_; - - // diag - Updater updater_{this}; - - // member variables - bool use_predicted_trajectory_; - bool use_imu_path_; - double voxel_grid_x_; - double voxel_grid_y_; - double voxel_grid_z_; - double min_generated_path_length_; - double expand_width_; - double longitudinal_offset_; - double t_response_; - double a_ego_min_; - double a_obj_min_; - double prediction_time_horizon_; - double prediction_time_interval_; - CollisionDataKeeper collision_data_keeper_; -}; -} // namespace autoware::motion::control::autonomous_emergency_braking - -#endif // AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ diff --git a/control/autonomous_emergency_braking/src/node.cpp b/control/autonomous_emergency_braking/src/node.cpp deleted file mode 100644 index 39419eeb6ae53..0000000000000 --- a/control/autonomous_emergency_braking/src/node.cpp +++ /dev/null @@ -1,532 +0,0 @@ -// Copyright 2022 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "autonomous_emergency_braking/node.hpp" - -#include -#include - -#ifdef ROS_DISTRO_GALACTIC -#include -#include -#else -#include - -#include -#endif - -namespace autoware::motion::control::autonomous_emergency_braking -{ -using diagnostic_msgs::msg::DiagnosticStatus; -namespace bg = boost::geometry; - -void appendPointToPolygon(Polygon2d & polygon, const geometry_msgs::msg::Point & geom_point) -{ - Point2d point; - point.x() = geom_point.x; - point.y() = geom_point.y; - - bg::append(polygon.outer(), point); -} - -Polygon2d createPolygon( - const geometry_msgs::msg::Pose & base_pose, const geometry_msgs::msg::Pose & next_pose, - const vehicle_info_util::VehicleInfo & vehicle_info, const double expand_width) -{ - Polygon2d polygon; - - const double longitudinal_offset = vehicle_info.max_longitudinal_offset_m; - const double width = vehicle_info.vehicle_width_m / 2.0 + expand_width; - const double rear_overhang = vehicle_info.rear_overhang_m; - - appendPointToPolygon( - polygon, - tier4_autoware_utils::calcOffsetPose(base_pose, longitudinal_offset, width, 0.0).position); - appendPointToPolygon( - polygon, - tier4_autoware_utils::calcOffsetPose(base_pose, longitudinal_offset, -width, 0.0).position); - appendPointToPolygon( - polygon, tier4_autoware_utils::calcOffsetPose(base_pose, -rear_overhang, -width, 0.0).position); - appendPointToPolygon( - polygon, tier4_autoware_utils::calcOffsetPose(base_pose, -rear_overhang, width, 0.0).position); - - appendPointToPolygon( - polygon, - tier4_autoware_utils::calcOffsetPose(next_pose, longitudinal_offset, width, 0.0).position); - appendPointToPolygon( - polygon, - tier4_autoware_utils::calcOffsetPose(next_pose, longitudinal_offset, -width, 0.0).position); - appendPointToPolygon( - polygon, tier4_autoware_utils::calcOffsetPose(next_pose, -rear_overhang, -width, 0.0).position); - appendPointToPolygon( - polygon, tier4_autoware_utils::calcOffsetPose(next_pose, -rear_overhang, width, 0.0).position); - - polygon = tier4_autoware_utils::isClockwise(polygon) - ? 
polygon - : tier4_autoware_utils::inverseClockwise(polygon); - - Polygon2d hull_polygon; - bg::convex_hull(polygon, hull_polygon); - - return hull_polygon; -} - -AEB::AEB(const rclcpp::NodeOptions & node_options) -: Node("AEB", node_options), - vehicle_info_(vehicle_info_util::VehicleInfoUtil(*this).getVehicleInfo()), - collision_data_keeper_(this->get_clock()) -{ - // Subscribers - sub_point_cloud_ = this->create_subscription( - "~/input/pointcloud", rclcpp::SensorDataQoS(), - std::bind(&AEB::onPointCloud, this, std::placeholders::_1)); - - sub_velocity_ = this->create_subscription( - "~/input/velocity", rclcpp::QoS{1}, std::bind(&AEB::onVelocity, this, std::placeholders::_1)); - - sub_imu_ = this->create_subscription( - "~/input/imu", rclcpp::QoS{1}, std::bind(&AEB::onImu, this, std::placeholders::_1)); - - sub_predicted_traj_ = this->create_subscription( - "~/input/predicted_trajectory", rclcpp::QoS{1}, - std::bind(&AEB::onPredictedTrajectory, this, std::placeholders::_1)); - - sub_autoware_state_ = this->create_subscription( - "/autoware/state", rclcpp::QoS{1}, - std::bind(&AEB::onAutowareState, this, std::placeholders::_1)); - - // Publisher - pub_obstacle_pointcloud_ = - this->create_publisher("~/debug/obstacle_pointcloud", 1); - debug_ego_path_publisher_ = this->create_publisher("~/debug/markers", 1); - - // Diagnostics - updater_.setHardwareID("autonomous_emergency_braking"); - updater_.add("aeb_emergency_stop", this, &AEB::onCheckCollision); - - // parameter - use_predicted_trajectory_ = declare_parameter("use_predicted_trajectory"); - use_imu_path_ = declare_parameter("use_imu_path"); - voxel_grid_x_ = declare_parameter("voxel_grid_x"); - voxel_grid_y_ = declare_parameter("voxel_grid_y"); - voxel_grid_z_ = declare_parameter("voxel_grid_z"); - min_generated_path_length_ = declare_parameter("min_generated_path_length"); - expand_width_ = declare_parameter("expand_width"); - longitudinal_offset_ = declare_parameter("longitudinal_offset"); - t_response_ = declare_parameter("t_response"); - a_ego_min_ = declare_parameter("a_ego_min"); - a_obj_min_ = declare_parameter("a_obj_min"); - prediction_time_horizon_ = declare_parameter("prediction_time_horizon"); - prediction_time_interval_ = declare_parameter("prediction_time_interval"); - - const auto collision_keeping_sec = declare_parameter("collision_keeping_sec"); - collision_data_keeper_.setTimeout(collision_keeping_sec); - - // start time - const double aeb_hz = declare_parameter("aeb_hz"); - const auto period_ns = rclcpp::Rate(aeb_hz).period(); - timer_ = rclcpp::create_timer(this, get_clock(), period_ns, std::bind(&AEB::onTimer, this)); -} - -void AEB::onTimer() { updater_.force_update(); } - -void AEB::onVelocity(const VelocityReport::ConstSharedPtr input_msg) -{ - current_velocity_ptr_ = input_msg; -} - -void AEB::onImu(const Imu::ConstSharedPtr input_msg) -{ - // transform imu - geometry_msgs::msg::TransformStamped transform_stamped{}; - try { - transform_stamped = tf_buffer_.lookupTransform( - "base_link", input_msg->header.frame_id, input_msg->header.stamp, - rclcpp::Duration::from_seconds(0.5)); - } catch (tf2::TransformException & ex) { - RCLCPP_ERROR_STREAM( - get_logger(), - "[AEB] Failed to look up transform from base_link to" << input_msg->header.frame_id); - return; - } - - angular_velocity_ptr_ = std::make_shared(); - tf2::doTransform(input_msg->angular_velocity, *angular_velocity_ptr_, transform_stamped); -} - -void AEB::onPredictedTrajectory( - const autoware_auto_planning_msgs::msg::Trajectory::ConstSharedPtr 
input_msg) -{ - predicted_traj_ptr_ = input_msg; -} - -void AEB::onAutowareState(const AutowareState::ConstSharedPtr input_msg) -{ - autoware_state_ = input_msg; -} - -void AEB::onPointCloud(const PointCloud2::ConstSharedPtr input_msg) -{ - PointCloud::Ptr pointcloud_ptr(new PointCloud); - pcl::fromROSMsg(*input_msg, *pointcloud_ptr); - - if (input_msg->header.frame_id != "base_link") { - RCLCPP_ERROR_STREAM( - get_logger(), - "[AEB]: Input point cloud frame is not base_link and it is " << input_msg->header.frame_id); - // transform pointcloud - geometry_msgs::msg::TransformStamped transform_stamped{}; - try { - transform_stamped = tf_buffer_.lookupTransform( - "base_link", input_msg->header.frame_id, input_msg->header.stamp, - rclcpp::Duration::from_seconds(0.5)); - } catch (tf2::TransformException & ex) { - RCLCPP_ERROR_STREAM( - get_logger(), - "[AEB] Failed to look up transform from base_link to" << input_msg->header.frame_id); - return; - } - - // transform by using eigen matrix - PointCloud2 transformed_points{}; - const Eigen::Matrix4f affine_matrix = - tf2::transformToEigen(transform_stamped.transform).matrix().cast(); - pcl_ros::transformPointCloud(affine_matrix, *input_msg, transformed_points); - pcl::fromROSMsg(transformed_points, *pointcloud_ptr); - } - - pcl::VoxelGrid filter; - PointCloud::Ptr no_height_filtered_pointcloud_ptr(new PointCloud); - filter.setInputCloud(pointcloud_ptr); - filter.setLeafSize(voxel_grid_x_, voxel_grid_y_, voxel_grid_z_); - filter.filter(*no_height_filtered_pointcloud_ptr); - - obstacle_ros_pointcloud_ptr_ = std::make_shared(); - pcl::toROSMsg(*no_height_filtered_pointcloud_ptr, *obstacle_ros_pointcloud_ptr_); - obstacle_ros_pointcloud_ptr_->header = input_msg->header; - pub_obstacle_pointcloud_->publish(*obstacle_ros_pointcloud_ptr_); -} - -bool AEB::isDataReady() -{ - const auto missing = [this](const auto & name) { - RCLCPP_INFO_SKIPFIRST_THROTTLE(get_logger(), *get_clock(), 5000, "[AEB] waiting for %s", name); - return false; - }; - - if (!current_velocity_ptr_) { - return missing("ego velocity"); - } - - if (!obstacle_ros_pointcloud_ptr_) { - return missing("object pointcloud"); - } - - if (use_imu_path_ && !angular_velocity_ptr_) { - return missing("imu"); - } - - if (use_predicted_trajectory_ && !predicted_traj_ptr_) { - return missing("control predicted trajectory"); - } - - if (!autoware_state_) { - return missing("autoware_state"); - } - - return true; -} - -void AEB::onCheckCollision(DiagnosticStatusWrapper & stat) -{ - MarkerArray debug_markers; - checkCollision(debug_markers); - - if (!collision_data_keeper_.checkExpired()) { - const std::string error_msg = "[AEB]: Emergency Brake"; - const auto diag_level = DiagnosticStatus::ERROR; - stat.summary(diag_level, error_msg); - const auto & data = collision_data_keeper_.get(); - stat.addf("RSS", "%.2f", data.rss); - stat.addf("Distance", "%.2f", data.distance_to_object); - addCollisionMarker(data, debug_markers); - } else { - const std::string error_msg = "[AEB]: No Collision"; - const auto diag_level = DiagnosticStatus::OK; - stat.summary(diag_level, error_msg); - } - - // publish debug markers - debug_ego_path_publisher_->publish(debug_markers); -} - -bool AEB::checkCollision(MarkerArray & debug_markers) -{ - // step1. check data - if (!isDataReady()) { - return false; - } - - // if not driving, disable aeb - if (autoware_state_->state != AutowareState::DRIVING) { - return false; - } - - // step2. 
create velocity data check if the vehicle stops or not - const double current_v = current_velocity_ptr_->longitudinal_velocity; - if (current_v < 0.1) { - return false; - } - - // step3. create ego path based on sensor data - bool has_collision_ego = false; - if (use_imu_path_) { - Path ego_path; - std::vector ego_polys; - const double current_w = angular_velocity_ptr_->z; - constexpr double color_r = 0.0 / 256.0; - constexpr double color_g = 148.0 / 256.0; - constexpr double color_b = 205.0 / 256.0; - constexpr double color_a = 0.999; - const auto current_time = get_clock()->now(); - generateEgoPath(current_v, current_w, ego_path, ego_polys); - - std::vector objects; - createObjectData(ego_path, ego_polys, current_time, objects); - has_collision_ego = hasCollision(current_v, ego_path, objects); - - std::string ns = "ego"; - addMarker( - current_time, ego_path, ego_polys, objects, color_r, color_g, color_b, color_a, ns, - debug_markers); - } - - // step4. transform predicted trajectory from control module - bool has_collision_predicted = false; - if (use_predicted_trajectory_) { - Path predicted_path; - std::vector predicted_polys; - const auto predicted_traj_ptr = predicted_traj_ptr_; - constexpr double color_r = 0.0; - constexpr double color_g = 100.0 / 256.0; - constexpr double color_b = 0.0; - constexpr double color_a = 0.999; - const auto current_time = predicted_traj_ptr->header.stamp; - generateEgoPath(*predicted_traj_ptr, predicted_path, predicted_polys); - std::vector objects; - createObjectData(predicted_path, predicted_polys, current_time, objects); - has_collision_predicted = hasCollision(current_v, predicted_path, objects); - - std::string ns = "predicted"; - addMarker( - current_time, predicted_path, predicted_polys, objects, color_r, color_g, color_b, color_a, - ns, debug_markers); - } - - return has_collision_ego || has_collision_predicted; -} - -bool AEB::hasCollision( - const double current_v, const Path & ego_path, const std::vector & objects) -{ - // calculate RSS - const auto current_p = tier4_autoware_utils::createPoint(0.0, 0.0, 0.0); - const double & t = t_response_; - for (const auto & obj : objects) { - const double & obj_v = obj.velocity; - const double rss_dist = current_v * t + (current_v * current_v) / (2 * std::fabs(a_ego_min_)) - - obj_v * obj_v / (2 * std::fabs(a_obj_min_)) + longitudinal_offset_; - const double dist_ego_to_object = - tier4_autoware_utils::calcSignedArcLength(ego_path, current_p, obj.position) - - vehicle_info_.max_longitudinal_offset_m; - if (dist_ego_to_object < rss_dist) { - // collision happens - ObjectData collision_data = obj; - collision_data.rss = rss_dist; - collision_data.distance_to_object = dist_ego_to_object; - collision_data_keeper_.update(collision_data); - return true; - } - } - - return false; -} - -void AEB::generateEgoPath( - const double curr_v, const double curr_w, Path & path, std::vector & polygons) -{ - double curr_x = 0.0; - double curr_y = 0.0; - double curr_yaw = 0.0; - geometry_msgs::msg::Pose ini_pose; - ini_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); - ini_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); - path.push_back(ini_pose); - - if (curr_v < 0.1) { - // if current velocity is too small, assume it stops at the same point - return; - } - - constexpr double epsilon = 1e-6; - const double & dt = prediction_time_interval_; - const double & horizon = prediction_time_horizon_; - for (double t = 0.0; t < horizon + epsilon; t += dt) { - curr_x = curr_x + 
curr_v * std::cos(curr_yaw) * dt; - curr_y = curr_y + curr_v * std::sin(curr_yaw) * dt; - curr_yaw = curr_yaw + curr_w * dt; - geometry_msgs::msg::Pose current_pose; - current_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); - current_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); - if (tier4_autoware_utils::calcDistance2d(path.back(), current_pose) < 1e-3) { - continue; - } - path.push_back(current_pose); - } - - // If path is shorter than minimum path length - while (tier4_autoware_utils::calcArcLength(path) < min_generated_path_length_) { - curr_x = curr_x + curr_v * std::cos(curr_yaw) * dt; - curr_y = curr_y + curr_v * std::sin(curr_yaw) * dt; - curr_yaw = curr_yaw + curr_w * dt; - geometry_msgs::msg::Pose current_pose; - current_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); - current_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); - if (tier4_autoware_utils::calcDistance2d(path.back(), current_pose) < 1e-3) { - continue; - } - path.push_back(current_pose); - } - - // generate ego polygons - polygons.resize(path.size() - 1); - for (size_t i = 0; i < path.size() - 1; ++i) { - polygons.at(i) = createPolygon(path.at(i), path.at(i + 1), vehicle_info_, expand_width_); - } -} - -void AEB::generateEgoPath( - const Trajectory & predicted_traj, Path & path, - std::vector & polygons) -{ - geometry_msgs::msg::TransformStamped transform_stamped{}; - try { - transform_stamped = tf_buffer_.lookupTransform( - "base_link", predicted_traj.header.frame_id, predicted_traj.header.stamp, - rclcpp::Duration::from_seconds(0.5)); - } catch (tf2::TransformException & ex) { - RCLCPP_ERROR_STREAM(get_logger(), "[AEB] Failed to look up transform from base_link to map"); - return; - } - - // create path - path.resize(predicted_traj.points.size()); - for (size_t i = 0; i < predicted_traj.points.size(); ++i) { - geometry_msgs::msg::Pose map_pose; - tf2::doTransform(predicted_traj.points.at(i).pose, map_pose, transform_stamped); - path.at(i) = map_pose; - } - - // create polygon - polygons.resize(path.size()); - for (size_t i = 0; i < path.size() - 1; ++i) { - polygons.at(i) = createPolygon(path.at(i), path.at(i + 1), vehicle_info_, expand_width_); - } -} - -void AEB::createObjectData( - const Path & ego_path, const std::vector & ego_polys, - const rclcpp::Time & stamp, std::vector & objects) -{ - // check if the predicted path has valid number of points - if (ego_path.size() < 2 || ego_polys.empty()) { - return; - } - - PointCloud::Ptr obstacle_points_ptr(new PointCloud); - pcl::fromROSMsg(*obstacle_ros_pointcloud_ptr_, *obstacle_points_ptr); - for (const auto & point : obstacle_points_ptr->points) { - ObjectData obj; - obj.stamp = stamp; - obj.position = tier4_autoware_utils::createPoint(point.x, point.y, point.z); - obj.velocity = 0.0; - const Point2d obj_point(point.x, point.y); - const double lat_dist = tier4_autoware_utils::calcLateralOffset(ego_path, obj.position); - if (lat_dist > 5.0) { - continue; - } - for (const auto & ego_poly : ego_polys) { - if (bg::within(obj_point, ego_poly)) { - objects.push_back(obj); - break; - } - } - } -} - -void AEB::addMarker( - const rclcpp::Time & current_time, const Path & path, const std::vector & polygons, - const std::vector & objects, const double color_r, const double color_g, - const double color_b, const double color_a, const std::string & ns, MarkerArray & debug_markers) -{ - auto path_marker = tier4_autoware_utils::createDefaultMarker( - "base_link", 
current_time, ns + "_path", 0L, Marker::LINE_STRIP, - tier4_autoware_utils::createMarkerScale(0.2, 0.2, 0.2), - tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a)); - path_marker.points.resize(path.size()); - for (size_t i = 0; i < path.size(); ++i) { - path_marker.points.at(i) = path.at(i).position; - } - debug_markers.markers.push_back(path_marker); - - auto polygon_marker = tier4_autoware_utils::createDefaultMarker( - "base_link", current_time, ns + "_polygon", 0, Marker::LINE_LIST, - tier4_autoware_utils::createMarkerScale(0.03, 0.0, 0.0), - tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a)); - for (const auto & poly : polygons) { - for (size_t dp_idx = 0; dp_idx < poly.outer().size(); ++dp_idx) { - const auto & boost_cp = poly.outer().at(dp_idx); - const auto & boost_np = poly.outer().at((dp_idx + 1) % poly.outer().size()); - const auto curr_point = tier4_autoware_utils::createPoint(boost_cp.x(), boost_cp.y(), 0.0); - const auto next_point = tier4_autoware_utils::createPoint(boost_np.x(), boost_np.y(), 0.0); - polygon_marker.points.push_back(curr_point); - polygon_marker.points.push_back(next_point); - } - } - debug_markers.markers.push_back(polygon_marker); - - auto object_data_marker = tier4_autoware_utils::createDefaultMarker( - "base_link", current_time, ns + "_objects", 0, Marker::SPHERE_LIST, - tier4_autoware_utils::createMarkerScale(0.05, 0.05, 0.05), - tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a)); - for (const auto & e : objects) { - object_data_marker.points.push_back(e.position); - } - debug_markers.markers.push_back(object_data_marker); -} - -void AEB::addCollisionMarker(const ObjectData & data, MarkerArray & debug_markers) -{ - auto point_marker = tier4_autoware_utils::createDefaultMarker( - "base_link", data.stamp, "collision_point", 0, Marker::SPHERE, - tier4_autoware_utils::createMarkerScale(0.3, 0.3, 0.3), - tier4_autoware_utils::createMarkerColor(1.0, 0.0, 0.0, 0.3)); - point_marker.pose.position = data.position; - debug_markers.markers.push_back(point_marker); -} - -} // namespace autoware::motion::control::autonomous_emergency_braking - -#include -RCLCPP_COMPONENTS_REGISTER_NODE(autoware::motion::control::autonomous_emergency_braking::AEB) diff --git a/control/autoware_autonomous_emergency_braking/CMakeLists.txt b/control/autoware_autonomous_emergency_braking/CMakeLists.txt new file mode 100644 index 0000000000000..6f3a3d06daf4e --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/CMakeLists.txt @@ -0,0 +1,44 @@ +cmake_minimum_required(VERSION 3.14) +project(autoware_autonomous_emergency_braking) + +find_package(autoware_cmake REQUIRED) +autoware_package() + +find_package(PCL REQUIRED) + +include_directories( + include + SYSTEM + ${PCL_INCLUDE_DIRS} +) + +ament_auto_add_library(autoware_autonomous_emergency_braking_helpers SHARED + include/autoware/autonomous_emergency_braking/utils.hpp + src/utils.cpp +) + +set(AEB_NODE ${PROJECT_NAME}_node) +ament_auto_add_library(${AEB_NODE} SHARED + include/autoware/autonomous_emergency_braking/node.hpp + src/node.cpp +) + +target_link_libraries(${AEB_NODE} autoware_autonomous_emergency_braking_helpers) +rclcpp_components_register_node(${AEB_NODE} + PLUGIN "autoware::motion::control::autonomous_emergency_braking::AEB" + EXECUTABLE ${PROJECT_NAME} +) + +# if(BUILD_TESTING) +# ament_add_ros_isolated_gtest(test_aeb +# test/test.cpp) + +# target_link_libraries(test_aeb ${AEB_NODE}) + +# endif() + +ament_auto_package( + INSTALL_TO_SHARE + 
launch
  config
)
diff --git a/control/autoware_autonomous_emergency_braking/README.md b/control/autoware_autonomous_emergency_braking/README.md
new file mode 100644
index 0000000000000..5ccd2f0709009
--- /dev/null
+++ b/control/autoware_autonomous_emergency_braking/README.md
@@ -0,0 +1,229 @@
+# Autonomous Emergency Braking (AEB)
+
+## Purpose / Role
+
+`autonomous_emergency_braking` is a module that prevents collisions with obstacles on the predicted path created by a control module, or on a path estimated from sensor values.
+
+### Assumptions
+
+This module has the following assumptions.
+
+- It is used when driving at low speeds (about 15 km/h).
+- The predicted path of the ego vehicle can be made from either the path created from sensors, the path created from a control module, or both.
+- The current speed and angular velocity can be obtained from the sensors of the ego vehicle, and it uses points as obstacles.
+- The AEB's target obstacles are 2D points that can be obtained from the input point cloud or by obtaining the intersection points between the predicted ego footprint path and a predicted object's shape.
+
+### IMU path generation: steering angle vs IMU's angular velocity
+
+Currently, the IMU-based path is generated using the angular velocity obtained by the IMU itself. It has been suggested that the steering angle could be used instead of the angular velocity.
+
+The pros and cons of both approaches are:
+
+IMU angular velocity:
+
+- (+) Usually, it has high accuracy.
+- (-) Vehicle vibration might introduce noise.
+
+Steering angle:
+
+- (+) Not so noisy.
+- (-) May have a steering offset or a wrong gear ratio, and the steering angle of Autoware and the real steering may not be the same.
+
+For the moment, there are no plans to implement the steering angle in the path creation process of the AEB module.
+
+## Inner-workings / Algorithms
+
+AEB has the following steps before it outputs the emergency stop signal.
+
+1. Activate AEB if necessary.
+2. Generate a predicted path of the ego vehicle.
+3. Get target obstacles from the input point cloud and/or predicted object data.
+4. Estimate the closest obstacle speed.
+5. Collision check with target obstacles.
+6. Send emergency stop signals to `/diagnostics`.
+
+We give more details of each step below.
+
+### 1. Activate AEB if necessary
+
+The AEB module is not activated if any of the following conditions is met.
+
+- The ego vehicle is not in an autonomous driving state.
+- The ego vehicle is not moving (the current velocity is below a 0.1 m/s threshold).
+
+### 2. Generate a predicted path of the ego vehicle
+
+AEB generates a predicted footprint path based on the current velocity and the current angular velocity obtained from the attached sensors. Note that if `use_imu_path` is `false`, this step is skipped. The predicted path is generated as:
+
+$$
+x_{k+1} = x_k + v \cos(\theta_k) dt \\
+y_{k+1} = y_k + v \sin(\theta_k) dt \\
+\theta_{k+1} = \theta_k + \omega dt
+$$
+
+where $v$ and $\omega$ are the current longitudinal velocity and angular velocity, respectively, and $dt$ is the time interval, which users can define in advance.
+
+On the other hand, if `use_predicted_trajectory` is set to true, the AEB module will use the predicted path from the MPC as a base to generate a footprint path. Both the IMU footprint path and the MPC footprint path can be used at the same time.
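+The integration above can be written as a few lines of C++ (an illustrative sketch only, not the module's actual implementation; the parameter names mirror the `imu_prediction_*` entries of the config file):
+
+```cpp
+#include <cmath>
+#include <vector>
+
+struct Pose2d
+{
+  double x;
+  double y;
+  double yaw;
+};
+
+// Forward-integrate the ego motion model used for the IMU-based path.
+// v: longitudinal velocity [m/s], w: angular velocity [rad/s],
+// dt: imu_prediction_time_interval [s], horizon: imu_prediction_time_horizon [s].
+std::vector<Pose2d> integrate_imu_path(
+  const double v, const double w, const double dt, const double horizon)
+{
+  std::vector<Pose2d> path{{0.0, 0.0, 0.0}};
+  for (double t = 0.0; t < horizon; t += dt) {
+    const Pose2d p = path.back();
+    path.push_back(
+      {p.x + v * std::cos(p.yaw) * dt, p.y + v * std::sin(p.yaw) * dt, p.yaw + w * dt});
+  }
+  return path;
+}
+```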
+### 3. Get target obstacles
+
+After generating the ego footprint path(s), the target obstacles are identified. There are two methods to find target obstacles: using the input point cloud, or using the predicted object information coming from perception modules.
+
+#### Pointcloud obstacle filtering
+
+The AEB module can filter the input pointcloud to find target obstacles with which the ego vehicle might collide. This method can be enabled by setting the `use_pointcloud_data` parameter to true. The pointcloud obstacle filtering has three major steps: rough filtering, noise filtering with clustering, and rigorous filtering.
+
+##### Rough filtering
+
+In the rough filtering step, target obstacles are selected with a simple filter: a search area is created up to a certain distance (by default, half of the ego vehicle width plus the `path_footprint_extra_margin` parameter) away from the predicted path of the ego vehicle, and points that are not within it are ignored. The rough filtering step is illustrated below.
+
+![rough_filtering](./image/obstacle_filtering_1.drawio.svg)
+
+##### Noise filtering with clustering and convex hulls
+
+To prevent the AEB from considering noisy points, Euclidean clustering is performed on the filtered point cloud. Points that are not close enough to other points to form a cluster are discarded. Furthermore, each point in a cluster is compared against the `cluster_minimum_height` parameter; if no point inside a cluster has a height/z value greater than `cluster_minimum_height`, the whole cluster of points is discarded. The parameters `cluster_tolerance`, `minimum_cluster_size` and `maximum_cluster_size` can be used to tune the clustering and the size of objects to be ignored. For more information about the clustering method used by the AEB module, please check the official documentation on Euclidean clustering of the PCL library.
+
+Furthermore, a 2D convex hull is created around each detected cluster; the vertices of each hull represent the most extreme/outside points of the cluster. These vertices are then checked in the next step.
+
+##### Rigorous filtering
+
+After noise filtering, the module performs a geometric collision check to determine whether the filtered obstacles/hull vertices actually have a possibility of colliding with the ego vehicle. In this check, the ego vehicle is represented as a rectangle, and the point cloud obstacles are represented as points. Only the vertices with a possibility of collision are kept.
+
+![rigorous_filtering](./image/obstacle_filtering_2.drawio.svg)
+
+#### Using predicted objects to get target obstacles
+
+If the `use_predicted_object_data` parameter is set to true, the AEB can use predicted object data coming from the perception modules to get target obstacle points. This is done by obtaining the 2D intersection points between the ego's predicted footprint path and each predicted object's enveloping polygon or bounding box.
+
+![predicted_object_and_path_intersection](./image/using-predicted-objects.drawio.svg)
+
+### Finding the closest target obstacle
+
+Once all target obstacles have been identified, the AEB module chooses the point that is closest to the ego vehicle as the candidate for collision checking. Only the closest point is considered because the RSS distance is used to judge whether a collision will happen, and if the closest vertex to the ego is deemed to be safe from collision, the rest of the target obstacles will also be safe.
+
+![closest_object](./image/closest-point.drawio.svg)
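+The clustering step described above can be approximated with the standard PCL Euclidean cluster extraction API (a sketch under that assumption, not the module's exact code; the numeric values mirror the packaged config defaults):
+
+```cpp
+#include <vector>
+
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <pcl/search/kd_tree.h>
+#include <pcl/segmentation/extract_clusters.h>
+
+// Cluster the cropped obstacle point cloud; points that do not belong to any cluster are dropped.
+std::vector<pcl::PointIndices> cluster_obstacles(
+  const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud)
+{
+  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
+  tree->setInputCloud(cloud);
+
+  pcl::EuclideanClusterExtraction<pcl::PointXYZ> ec;
+  ec.setClusterTolerance(0.1);  // cluster_tolerance [m]
+  ec.setMinClusterSize(10);     // minimum_cluster_size
+  ec.setMaxClusterSize(10000);  // maximum_cluster_size
+  ec.setSearchMethod(tree);
+  ec.setInputCloud(cloud);
+
+  std::vector<pcl::PointIndices> cluster_indices;
+  ec.extract(cluster_indices);
+  return cluster_indices;
+}
+```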
+### 4. Obstacle velocity estimation
+
+Once the position of the closest obstacle/point is determined, the AEB module uses the history of previously detected objects to estimate the relative speed of the closest object using the following equations:
+
+$$
+d_{t} = t_{1} - t_{0}
+$$
+
+$$
+d_{x} = norm(o_{x} - prev_{x})
+$$
+
+$$
+v_{norm} = d_{x} / d_{t}
+$$
+
+where $t_{1}$ and $t_{0}$ are the timestamps of the point clouds used to detect the current closest object and the closest object of the previous point cloud frame, and $o_{x}$ and $prev_{x}$ are the positions of those objects, respectively.
+
+![relative_speed](./image/object_relative_speed.drawio.svg)
+
+Note that, when the closest obstacle/point comes from predicted object data, $v_{norm}$ is calculated by directly computing the norm of the predicted object's velocity in the x and y axes.
+
+The velocity vector is then compared against the ego's predicted path to get the longitudinal velocity $v_{obj}$:
+
+$$
+v_{obj} = v_{norm} \cos(yaw_{diff}) + v_{ego}
+$$
+
+where $yaw_{diff}$ is the difference in yaw between the ego path and the displacement vector $v_{pos} = o_{pos} - prev_{pos}$, and $v_{ego}$ is the ego's current speed, which accounts for the movement of points caused by the ego moving rather than by the object. All these computations are performed disregarding the z axis (in 2D).
+
+Note that the object velocity is calculated against the ego's current movement direction. If the object moves in the opposite direction to the ego's movement, the object velocity will be negative, which will increase the RSS distance in the next step.
+
+The resulting estimated object speed is added to a queue of speeds with timestamps. The AEB then checks for expiration of past speed estimations and eliminates expired speed measurements from the queue; an estimation expires when the time elapsed since it was first added to the queue is larger than the parameter `previous_obstacle_keep_time`. Finally, the median speed of the queue is calculated. The median speed is used to calculate the RSS distance for collision checking.
+
+### 5. Collision check with target obstacles using RSS distance
+
+In this step, the module checks for a collision with the closest obstacle point using the RSS distance, which is formulated as:
+
+$$
+d = v_{ego} \cdot t_{response} + \frac{v_{ego}^2}{2 a_{min}} - \mathrm{sign}(v_{obj}) \cdot \frac{v_{obj}^2}{2 a_{obj_{min}}} + offset
+$$
+
+where $v_{ego}$ and $v_{obj}$ are the current ego and obstacle velocities, $a_{min}$ and $a_{obj_{min}}$ are the ego and object minimum accelerations (maximum decelerations), and $t_{response}$ is the response time of the ego vehicle to start decelerating. If the distance from the ego vehicle to the obstacle is smaller than this RSS distance $d$, the ego vehicle sends emergency stop signals. This is illustrated in the following picture.
+
+![rss_check](./image/rss_check.drawio.svg)
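+As a minimal sketch of this formula (illustrative only, using the magnitudes of `a_ego_min` and `a_obj_min` as deceleration limits; not the node's actual code):
+
+```cpp
+#include <cmath>
+
+// RSS distance in meters. v_ego, v_obj: ego/object longitudinal velocities [m/s];
+// a_ego_min, a_obj_min: minimum accelerations (maximum decelerations) [m/s^2];
+// t_response: response time [s]; longitudinal_offset: extra margin [m].
+double calc_rss_distance(
+  const double v_ego, const double v_obj, const double a_ego_min, const double a_obj_min,
+  const double t_response, const double longitudinal_offset)
+{
+  const double ego_stopping = v_ego * t_response + (v_ego * v_ego) / (2.0 * std::fabs(a_ego_min));
+  // An object moving away from the ego (v_obj > 0) shortens the required distance,
+  // while an object approaching the ego (v_obj < 0) lengthens it.
+  const double sign = (v_obj >= 0.0) ? 1.0 : -1.0;
+  const double obj_braking = sign * (v_obj * v_obj) / (2.0 * std::fabs(a_obj_min));
+  return ego_stopping - obj_braking + longitudinal_offset;
+}
+```
+
+A collision is then reported when the arc-length distance along the ego path from the ego front to the closest obstacle point is smaller than this value.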
+### 6. Send emergency stop signals to `/diagnostics`
+
+If the AEB detects a collision with point cloud obstacles in the previous step, it sends an emergency signal to `/diagnostics` in this step. Note that in order to enable an emergency stop, it has to send an ERROR level emergency. Moreover, the AEB user should modify the setting file to keep the emergency level; otherwise, Autoware does not hold the emergency state.
+
+## Use cases
+
+### Front vehicle suddenly brakes
+
+The AEB can activate when a vehicle in front suddenly brakes and a collision is detected by the AEB module. Provided the distance between the ego vehicle and the front vehicle is large enough and the ego's emergency acceleration value is high enough, it is possible to avoid or soften collisions with vehicles in front that suddenly brake. NOTE: the acceleration used by the AEB to calculate rss_distance is NOT necessarily the acceleration used by the ego while performing an emergency brake. The acceleration used by the real vehicle can be tuned by changing the [mrm_emergency stop jerk and acceleration values](https://github.com/tier4/autoware_launch/blob/d1b2688f2788acab95bb9995d72efd7182e9006a/autoware_launch/config/system/mrm_emergency_stop_operator/mrm_emergency_stop_operator.param.yaml#L4).
+
+![front vehicle collision prevention](./image/front_vehicle_collision.drawio.svg)
+
+### Stop for objects that appear suddenly
+
+When an object appears suddenly, the AEB can act as a fail-safe to stop the ego vehicle when other modules fail to detect the object on time. If sudden object cut-ins are expected, it might be useful for the AEB module to detect collisions with objects BEFORE they enter the real ego vehicle path, by increasing the `expand_width` parameter.
+
+![occluded object collision prevention](./image/occluded_space.drawio.svg)
+
+### Preventing collisions with rear objects
+
+The AEB module can also prevent collisions when the ego vehicle is moving backwards.
+
+![backward driving](./image/backward-driving.drawio.svg)
+
+### Preventing collisions in case of wrong odometry (IMU path only)
+
+When vehicle odometry information is faulty, it is possible that the MPC fails to predict a correct path for the ego vehicle. If the MPC predicted path is wrong, collision avoidance will not work as intended in the planning modules. However, the AEB's IMU path does not depend on the MPC and may still be able to predict a collision when the other modules cannot. As an example, the figure below shows a hypothetical case in which the MPC path is wrong and only the AEB's IMU path detects a collision.
+
+![wrong mpc](./image/wrong-mpc.drawio.svg)
+
+## Parameters
+
+| Name | Unit | Type | Description | Default value |
+| :--- | :--- | :--- | :---------- | :------------ |
+| publish_debug_markers | [-] | bool | flag to publish debug markers | true |
+| publish_debug_pointcloud | [-] | bool | flag to publish the point cloud used for debugging | false |
+| use_predicted_trajectory | [-] | bool | flag to use the predicted path from the control module | true |
+| use_imu_path | [-] | bool | flag to use the predicted path generated by sensor data | true |
+| use_object_velocity_calculation | [-] | bool | flag to use the object velocity calculation. If set to false, the object velocity is set to 0 [m/s] | true |
+| detection_range_min_height | [m] | double | minimum height of the detection range, used to avoid ghost braking caused by false-positive point clouds | 0.0 |
+| detection_range_max_height_margin | [m] | double | margin for the maximum height of the detection range, used to avoid ghost braking caused by false-positive point clouds. `detection_range_max_height = vehicle_height + detection_range_max_height_margin` | 0.0 |
+| voxel_grid_x | [m] | double | down sampling parameter of the x-axis for the voxel grid filter | 0.05 |
+| voxel_grid_y | [m] | double | down sampling parameter of the y-axis for the voxel grid filter | 0.05 |
+| voxel_grid_z | [m] | double | down sampling parameter of the z-axis for the voxel grid filter | 100000.0 |
+| cluster_tolerance | [m] | double | maximum allowable distance between any two points to be considered part of the same cluster | 0.15 |
+| cluster_minimum_height | [m] | double | at least one point in a cluster must be higher than this value for the cluster to be included in the set of possible collision targets | 0.1 |
+| minimum_cluster_size | [-] | int | minimum number of points a cluster must contain to be considered as a possible target obstacle | 10 |
+| maximum_cluster_size | [-] | int | maximum number of points a cluster may contain to be considered as a possible target obstacle | 10000 |
+| min_generated_path_length | [m] | double | minimum distance for a predicted path generated by sensors | 0.5 |
+| expand_width | [m] | double | expansion width of the ego vehicle for the collision check | 0.1 |
+| longitudinal_offset | [m] | double | longitudinal offset distance for the collision check | 2.0 |
+| t_response | [s] | double | response time for the ego to detect the front vehicle starting deceleration | 1.0 |
+| a_ego_min | [m/s^2] | double | maximum deceleration value of the ego vehicle | -3.0 |
+| a_obj_min | [m/s^2] | double | maximum deceleration value of objects | -3.0 |
+| imu_prediction_time_horizon | [s] | double | time horizon of the predicted path generated by sensors | 1.5 |
+| imu_prediction_time_interval | [s] | double | time interval of the predicted path generated by sensors | 0.1 |
+| mpc_prediction_time_horizon | [s] | double | time horizon of the predicted path generated by the MPC | 1.5 |
+| mpc_prediction_time_interval | [s] | double | time interval of the predicted path generated by the MPC | 0.1 |
+| aeb_hz | [-] | double | frequency at which the AEB operates per second | 10 |
+
+## Limitations
+
+- The AEB might not be able to react to obstacles that are close to the ground. It depends on the performance of the pre-processing methods applied to the point cloud.
+- Longitudinal acceleration information obtained from sensors is not used due to the high amount of noise.
+- The accuracy of the predicted path created from sensor data depends on the accuracy of the sensors attached to the ego vehicle.
+ +![aeb_range](./image/range.drawio.svg) diff --git a/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml b/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml new file mode 100644 index 0000000000000..f13e95c6db8e5 --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/config/autonomous_emergency_braking.param.yaml @@ -0,0 +1,44 @@ +/**: + ros__parameters: + # Ego path calculation + use_predicted_trajectory: true + use_imu_path: false + use_pointcloud_data: true + use_predicted_object_data: true + use_object_velocity_calculation: true + check_autoware_state: true + min_generated_path_length: 0.5 + imu_prediction_time_horizon: 1.5 + imu_prediction_time_interval: 0.1 + mpc_prediction_time_horizon: 1.5 + mpc_prediction_time_interval: 0.1 + + # Debug + publish_debug_pointcloud: false + publish_debug_markers: true + + # Point cloud partitioning + detection_range_min_height: 0.0 + detection_range_max_height_margin: 0.0 + voxel_grid_x: 0.05 + voxel_grid_y: 0.05 + voxel_grid_z: 100000.0 + + # Point cloud cropping + expand_width: 0.1 + path_footprint_extra_margin: 4.0 + + # Point cloud clustering + cluster_tolerance: 0.1 #[m] + cluster_minimum_height: 0.0 + minimum_cluster_size: 10 + maximum_cluster_size: 10000 + + # RSS distance collision check + longitudinal_offset: 2.0 + t_response: 1.0 + a_ego_min: -3.0 + a_obj_min: -1.0 + collision_keeping_sec: 2.0 + previous_obstacle_keep_time: 1.0 + aeb_hz: 10.0 diff --git a/control/autoware_autonomous_emergency_braking/image/backward-driving.drawio.svg b/control/autoware_autonomous_emergency_braking/image/backward-driving.drawio.svg new file mode 100644 index 0000000000000..4897078d28f00 --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/image/backward-driving.drawio.svg @@ -0,0 +1,218 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/control/autoware_autonomous_emergency_braking/image/backward-driving.drawio.svg b/control/autoware_autonomous_emergency_braking/image/backward-driving.drawio.svg
new file mode 100644
index 0000000000000..4897078d28f00
[draw.io SVG diagram (markup omitted); no recoverable text labels]
diff --git a/control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg b/control/autoware_autonomous_emergency_braking/image/closest-point.drawio.svg
new file mode 100644
index 0000000000000..154bba1fc9152
[draw.io SVG diagram (markup omitted); labels: "Closest Point", "Find the closest target obstacle"]
diff --git a/control/autoware_autonomous_emergency_braking/image/front_vehicle_collision.drawio.svg b/control/autoware_autonomous_emergency_braking/image/front_vehicle_collision.drawio.svg
new file mode 100644
index 0000000000000..cb7caefad5a8a
[draw.io SVG diagram (markup omitted); labels: "Ego", "Predicted path", "Front vehicle suddenly brakes", "After the reaction time, the AEB has detected a collision and activated the brakes", "Front vehicle stops", "Ego stops"]
diff --git a/control/autoware_autonomous_emergency_braking/image/object_relative_speed.drawio.svg b/control/autoware_autonomous_emergency_braking/image/object_relative_speed.drawio.svg
new file mode 100644
index 0000000000000..7b0b7b8631943
[draw.io SVG diagram (markup omitted); labels: "Closest Point t_0", "Closest Point t_1", "dx"]
diff --git a/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_1.drawio.svg b/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_1.drawio.svg
new file mode 100644
index 0000000000000..ee5ec62fbc1d4
[draw.io SVG diagram (markup omitted); labels: "Original Point Cloud", "Cropping the Point Cloud with extended path"]
diff --git a/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg b/control/autoware_autonomous_emergency_braking/image/obstacle_filtering_2.drawio.svg
new file mode 100644
index 0000000000000..137872daebace
[draw.io SVG diagram (markup omitted); label: "Filter out points outside clusters and get a 2D convex hull of each cluster"]
diff --git a/control/autoware_autonomous_emergency_braking/image/occluded_space.drawio.svg b/control/autoware_autonomous_emergency_braking/image/occluded_space.drawio.svg
new file mode 100644
index 0000000000000..0cb31d5ab6e72
[draw.io SVG diagram (markup omitted); labels: "Ego", "Occlusion", "AEB triggers emergency brakes", "Ego stops before collision"]
diff --git a/control/autoware_autonomous_emergency_braking/image/range.drawio.svg b/control/autoware_autonomous_emergency_braking/image/range.drawio.svg
new file mode 100644
index 0000000000000..d647d9060a67c
[draw.io SVG diagram (markup omitted); label: "Obstacles in this range might not be able to react"]
diff --git a/control/autoware_autonomous_emergency_braking/image/rss_check.drawio.svg b/control/autoware_autonomous_emergency_braking/image/rss_check.drawio.svg
new file mode 100644
index 0000000000000..2d1716519631d
[draw.io SVG diagram (markup omitted); labels: "Closest Point Distance", "RSS distance"]
diff --git a/control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg b/control/autoware_autonomous_emergency_braking/image/using-predicted-objects.drawio.svg
new file mode 100644
index 0000000000000..244b609c7f009
[draw.io SVG diagram (markup omitted); label: "Ego path footprint and object polygon intersection points"]
diff --git a/control/autoware_autonomous_emergency_braking/image/wrong-mpc.drawio.svg b/control/autoware_autonomous_emergency_braking/image/wrong-mpc.drawio.svg
new file mode 100644
index 0000000000000..a829cb60ec7d6
[draw.io SVG diagram (markup omitted); labels: "IMU", "MPC", "MPC path is wrong"]
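The object_relative_speed diagram above corresponds to the velocity estimation in CollisionDataKeeper::calcObjectSpeedFromHistory() in the header below: the displacement of the closest point between two consecutive detections is divided by the time delta, projected onto the path heading, and offset by the ego speed, since the points are handled in the moving ego frame. A stand-alone sketch with hypothetical numbers, not part of the package:

// Illustrative sketch only: mirrors the estimate in
// CollisionDataKeeper::calcObjectSpeedFromHistory() below.
#include <cmath>
#include <cstdio>

int main()
{
  // Hypothetical closest points (ego frame) at two consecutive stamps.
  const double p0_x = 10.0, p0_y = 0.0;  // [m] closest point at t_0
  const double p1_x = 9.5, p1_y = 0.0;   // [m] closest point at t_1
  const double dt = 0.1;                 // [s] time between the two detections
  const double ego_speed = 5.0;          // [m/s] current ego speed
  const double path_yaw = 0.0;           // [rad] path heading at the closest point

  const double dx = p1_x - p0_x;
  const double dy = p1_y - p0_y;
  const double p_vel = std::hypot(dx, dy) / dt;  // apparent speed of the closest point
  const double p_yaw = std::atan2(dy, dx);       // direction of its displacement

  // Project onto the path direction and add back the ego speed.
  const double obj_vel = p_vel * std::cos(p_yaw - path_yaw) + std::abs(ego_speed);

  // The point closes in at exactly the ego speed, so the object itself is standing still.
  std::printf("estimated object velocity: %.2f m/s\n", obj_vel);  // prints 0.00
  return 0;
}

Consecutive estimates are pushed into a velocity history and the median is used, which suppresses single-frame outliers in the closest-point association.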
diff --git a/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp new file mode 100644 index 0000000000000..30ac91a3c890b --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/node.hpp @@ -0,0 +1,540 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ +#define AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +namespace autoware::motion::control::autonomous_emergency_braking +{ + +using autoware_auto_planning_msgs::msg::Trajectory; +using autoware_auto_system_msgs::msg::AutowareState; +using autoware_auto_vehicle_msgs::msg::VelocityReport; +using nav_msgs::msg::Odometry; +using sensor_msgs::msg::Imu; +using sensor_msgs::msg::PointCloud2; +using PointCloud = pcl::PointCloud; +using diagnostic_updater::DiagnosticStatusWrapper; +using diagnostic_updater::Updater; +using tier4_autoware_utils::Polygon2d; +using vehicle_info_util::VehicleInfo; +using visualization_msgs::msg::Marker; +using visualization_msgs::msg::MarkerArray; +using Path = std::vector; +using Vector3 = geometry_msgs::msg::Vector3; +using autoware_auto_perception_msgs::msg::PredictedObject; +using autoware_auto_perception_msgs::msg::PredictedObjects; + +/** + * @brief Struct to store object data + */ +struct ObjectData +{ + rclcpp::Time stamp; + geometry_msgs::msg::Point position; + double velocity{0.0}; + double rss{0.0}; + double distance_to_object{0.0}; +}; + +/** + * @brief Class to manage collision data + */ +class CollisionDataKeeper +{ +public: + /** + * @brief Constructor for CollisionDataKeeper + * @param clock Shared pointer to the clock + */ + explicit CollisionDataKeeper(rclcpp::Clock::SharedPtr clock) { clock_ = clock; } + + /** + * @brief Set timeout values for collision and obstacle data + * @param collision_keep_time Time to keep collision data + * @param previous_obstacle_keep_time Time to keep previous obstacle data + */ + void setTimeout(const double collision_keep_time, const double previous_obstacle_keep_time) + { + collision_keep_time_ = collision_keep_time; + previous_obstacle_keep_time_ = previous_obstacle_keep_time; + } + + /** + * @brief Get timeout values for collision and obstacle data + * @return Pair of collision and obstacle data timeout values + */ + std::pair getTimeout() + { + return {collision_keep_time_, previous_obstacle_keep_time_}; + } + + /** + * @brief Check if object data has expired + * @param data Optional reference to the object data + * @param timeout Timeout value to check against + * @return 
True if object data has expired, false otherwise + */ + bool checkObjectDataExpired(std::optional & data, const double timeout) + { + if (!data.has_value()) return true; + const auto now = clock_->now(); + const auto & prev_obj = data.value(); + const auto & data_time_stamp = prev_obj.stamp; + if ((now - data_time_stamp).nanoseconds() * 1e-9 > timeout) { + data = std::nullopt; + return true; + } + return false; + } + + /** + * @brief Check if collision data has expired + * @return True if collision data has expired, false otherwise + */ + bool checkCollisionExpired() + { + return this->checkObjectDataExpired(closest_object_, collision_keep_time_); + } + + /** + * @brief Check if previous object data has expired + * @return True if previous object data has expired, false otherwise + */ + bool checkPreviousObjectDataExpired() + { + return this->checkObjectDataExpired(prev_closest_object_, previous_obstacle_keep_time_); + } + + /** + * @brief Get the closest object data + * @return Object data of the closest object + */ + [[nodiscard]] std::optional get() const { return closest_object_; } + + /** + * @brief Get the previous closest object data + * @return Object data of the previous closest object + */ + [[nodiscard]] ObjectData getPreviousObjectData() const + { + return (prev_closest_object_.has_value()) ? prev_closest_object_.value() : ObjectData(); + } + + /** + * @brief Set collision data + * @param data Object data to set + */ + void setCollisionData(const ObjectData & data) + { + closest_object_ = std::make_optional(data); + } + + /** + * @brief Set previous object data + * @param data Object data to set + */ + void setPreviousObjectData(const ObjectData & data) + { + prev_closest_object_ = std::make_optional(data); + } + + /** + * @brief Reset the obstacle velocity history + */ + void resetVelocityHistory() { obstacle_velocity_history_.clear(); } + + /** + * @brief Update the velocity history with current object velocity + * @param current_object_velocity Current object velocity + * @param current_object_velocity_time_stamp Timestamp of the current object velocity + */ + void updateVelocityHistory( + const double current_object_velocity, const rclcpp::Time & current_object_velocity_time_stamp) + { + // remove old msg from deque + const auto now = clock_->now(); + obstacle_velocity_history_.erase( + std::remove_if( + obstacle_velocity_history_.begin(), obstacle_velocity_history_.end(), + [&](const auto & velocity_time_pair) { + const auto & vel_time = velocity_time_pair.second; + return ((now - vel_time).nanoseconds() * 1e-9 > previous_obstacle_keep_time_); + }), + obstacle_velocity_history_.end()); + obstacle_velocity_history_.emplace_back( + current_object_velocity, current_object_velocity_time_stamp); + } + + /** + * @brief Get the median obstacle velocity from history + * @return Optional median obstacle velocity + */ + [[nodiscard]] std::optional getMedianObstacleVelocity() const + { + if (obstacle_velocity_history_.empty()) return std::nullopt; + std::vector raw_velocities; + raw_velocities.reserve(obstacle_velocity_history_.size()); + for (const auto & vel_time_pair : obstacle_velocity_history_) { + raw_velocities.emplace_back(vel_time_pair.first); + } + + const size_t med1 = (raw_velocities.size() % 2 == 0) ? 
(raw_velocities.size()) / 2 - 1 + : (raw_velocities.size()) / 2; + const size_t med2 = (raw_velocities.size()) / 2; + std::nth_element(raw_velocities.begin(), raw_velocities.begin() + med1, raw_velocities.end()); + const double vel1 = raw_velocities.at(med1); + std::nth_element(raw_velocities.begin(), raw_velocities.begin() + med2, raw_velocities.end()); + const double vel2 = raw_velocities.at(med2); + return (vel1 + vel2) / 2.0; + } + + /** + * @brief Calculate object speed from velocity history + * @param closest_object Closest object data + * @param path Ego vehicle path + * @param current_ego_speed Current ego vehicle speed + * @return Optional calculated object speed + */ + std::optional calcObjectSpeedFromHistory( + const ObjectData & closest_object, const Path & path, const double current_ego_speed) + { + // in case the object comes from predicted objects info, we reuse the speed. + if (std::abs(closest_object.velocity) > std::numeric_limits::epsilon()) { + this->setPreviousObjectData(closest_object); + this->updateVelocityHistory(closest_object.velocity, closest_object.stamp); + return this->getMedianObstacleVelocity(); + } + + if (this->checkPreviousObjectDataExpired()) { + this->setPreviousObjectData(closest_object); + this->resetVelocityHistory(); + return std::nullopt; + } + + const auto estimated_velocity_opt = std::invoke([&]() -> std::optional { + const auto & prev_object = this->getPreviousObjectData(); + const double p_dt = + (closest_object.stamp.nanoseconds() - prev_object.stamp.nanoseconds()) * 1e-9; + if (p_dt < std::numeric_limits::epsilon()) return std::nullopt; + const auto & nearest_collision_point = closest_object.position; + const auto & prev_collision_point = prev_object.position; + + const double p_dx = nearest_collision_point.x - prev_collision_point.x; + const double p_dy = nearest_collision_point.y - prev_collision_point.y; + const double p_dist = std::hypot(p_dx, p_dy); + const double p_yaw = std::atan2(p_dy, p_dx); + const double p_vel = p_dist / p_dt; + + const auto nearest_idx = + tier4_autoware_utils::findNearestIndex(path, nearest_collision_point); + const auto & nearest_path_pose = path.at(nearest_idx); + // When the ego moves backwards, the direction of movement axis is reversed + const auto & traj_yaw = (current_ego_speed > 0.0) + ? 
tf2::getYaw(nearest_path_pose.orientation) + : tf2::getYaw(nearest_path_pose.orientation) + M_PI; + const auto estimated_velocity = + p_vel * std::cos(p_yaw - traj_yaw) + std::abs(current_ego_speed); + + // Current RSS distance calculation does not account for negative velocities + return estimated_velocity; + }); + + if (!estimated_velocity_opt.has_value()) { + return std::nullopt; + } + + const auto & estimated_velocity = estimated_velocity_opt.value(); + this->setPreviousObjectData(closest_object); + this->updateVelocityHistory(estimated_velocity, closest_object.stamp); + return this->getMedianObstacleVelocity(); + } + +private: + std::optional prev_closest_object_{std::nullopt}; + std::optional closest_object_{std::nullopt}; + double collision_keep_time_{0.0}; + double previous_obstacle_keep_time_{0.0}; + + std::deque> obstacle_velocity_history_; + rclcpp::Clock::SharedPtr clock_; +}; + +/** + * @brief Autonomous Emergency Braking (AEB) node + */ +class AEB : public rclcpp::Node +{ +public: + /** + * @brief Constructor for AEB + * @param node_options Options for the node + */ + explicit AEB(const rclcpp::NodeOptions & node_options); + + // subscriber + autoware::universe_utils::InterProcessPollingSubscriber sub_point_cloud_{ + this, "~/input/pointcloud", autoware::universe_utils::SingleDepthSensorQoS()}; + autoware::universe_utils::InterProcessPollingSubscriber sub_velocity_{ + this, "~/input/velocity"}; + autoware::universe_utils::InterProcessPollingSubscriber sub_imu_{this, "~/input/imu"}; + autoware::universe_utils::InterProcessPollingSubscriber sub_predicted_traj_{ + this, "~/input/predicted_trajectory"}; + autoware::universe_utils::InterProcessPollingSubscriber predicted_objects_sub_{ + this, "~/input/objects"}; + autoware::universe_utils::InterProcessPollingSubscriber sub_autoware_state_{ + this, "/autoware/state"}; + // publisher + rclcpp::Publisher::SharedPtr pub_obstacle_pointcloud_; + rclcpp::Publisher::SharedPtr debug_marker_publisher_; + rclcpp::Publisher::SharedPtr info_marker_publisher_; + + // timer + rclcpp::TimerBase::SharedPtr timer_; + + // callback + /** + * @brief Callback for point cloud messages + * @param input_msg Shared pointer to the point cloud message + */ + void onPointCloud(const PointCloud2::ConstSharedPtr input_msg); + + /** + * @brief Callback for IMU messages + * @param input_msg Shared pointer to the IMU message + */ + void onImu(const Imu::ConstSharedPtr input_msg); + + /** + * @brief Timer callback function + */ + void onTimer(); + + /** + * @brief Callback for parameter updates + * @param parameters Vector of updated parameters + * @return Set parameters result + */ + rcl_interfaces::msg::SetParametersResult onParameter( + const std::vector & parameters); + + /** + * @brief Fetch the latest data from subscribers + * @return True if data fetch was successful, false otherwise + */ + bool fetchLatestData(); + + /** + * @brief Diagnostic check for collisions + * @param stat Diagnostic status wrapper + */ + void onCheckCollision(DiagnosticStatusWrapper & stat); + + /** + * @brief Check for collisions + * @param debug_markers Marker array for debugging + * @return True if a collision is detected, false otherwise + */ + bool checkCollision(MarkerArray & debug_markers); + + /** + * @brief Check if there is a collision with the closest object + * @param current_v Current velocity of the ego vehicle + * @param closest_object Data of the closest object + * @return True if a collision is detected, false otherwise + */ + bool hasCollision(const double 
current_v, const ObjectData & closest_object); + + /** + * @brief Generate the ego vehicle path + * @param curr_v Current velocity of the ego vehicle + * @param curr_w Current angular velocity of the ego vehicle + * @return Generated ego path + */ + Path generateEgoPath(const double curr_v, const double curr_w); + + /** + * @brief Generate the ego vehicle path from the predicted trajectory + * @param predicted_traj Predicted trajectory of the ego vehicle + * @return Optional generated ego path + */ + std::optional generateEgoPath(const Trajectory & predicted_traj); + + /** + * @brief Generate the footprint of the path with extra width margin + * @param path Ego vehicle path + * @param extra_width_margin Extra width margin for the footprint + * @return Vector of polygons representing the path footprint + */ + std::vector generatePathFootprint(const Path & path, const double extra_width_margin); + + /** + * @brief Create object data using point cloud clusters + * @param ego_path Ego vehicle path + * @param ego_polys Polygons representing the ego vehicle footprint + * @param stamp Timestamp of the data + * @param objects Vector to store the created object data + * @param obstacle_points_ptr Pointer to the point cloud of obstacles + */ + void createObjectDataUsingPointCloudClusters( + const Path & ego_path, const std::vector & ego_polys, const rclcpp::Time & stamp, + std::vector & objects, + const pcl::PointCloud::Ptr obstacle_points_ptr); + + /** + * @brief Create object data using predicted objects + * @param ego_path Ego vehicle path + * @param ego_polys Polygons representing the ego vehicle footprint + * @param objects Vector to store the created object data + */ + void createObjectDataUsingPredictedObjects( + const Path & ego_path, const std::vector & ego_polys, + std::vector & objects); + + /** + * @brief Crop the point cloud with the ego vehicle footprint path + * @param ego_polys Polygons representing the ego vehicle footprint + * @param filtered_objects Pointer to the filtered point cloud of obstacles + */ + void cropPointCloudWithEgoFootprintPath( + const std::vector & ego_polys, pcl::PointCloud::Ptr filtered_objects); + + /** + * @brief Add a marker for debugging + * @param current_time Current time + * @param path Ego vehicle path + * @param polygons Polygons representing the ego vehicle footprint + * @param objects Vector of object data + * @param closest_object Optional data of the closest object + * @param color_r Red color component + * @param color_g Green color component + * @param color_b Blue color component + * @param color_a Alpha (transparency) component + * @param ns Namespace for the marker + * @param debug_markers Marker array for debugging + */ + void addMarker( + const rclcpp::Time & current_time, const Path & path, const std::vector & polygons, + const std::vector & objects, const std::optional & closest_object, + const double color_r, const double color_g, const double color_b, const double color_a, + const std::string & ns, MarkerArray & debug_markers); + + /** + * @brief Add a collision marker for debugging + * @param data Data of the collision object + * @param debug_markers Marker array for debugging + */ + void addCollisionMarker(const ObjectData & data, MarkerArray & debug_markers); + + /** + * @brief Add an info marker stop wall in front of the ego vehicle + * @param markers Data of the closest object + */ + void addVirtualStopWallMarker(MarkerArray & markers); + + /** + * @brief Calculate object speed from history + * @param closest_object Data of the 
closest object + * @param path Ego vehicle path + * @param current_ego_speed Current speed of the ego vehicle + * @return Optional calculated object speed + */ + std::optional calcObjectSpeedFromHistory( + const ObjectData & closest_object, const Path & path, const double current_ego_speed); + + // Member variables + PointCloud2::SharedPtr obstacle_ros_pointcloud_ptr_{nullptr}; + VelocityReport::ConstSharedPtr current_velocity_ptr_{nullptr}; + Vector3::SharedPtr angular_velocity_ptr_{nullptr}; + Trajectory::ConstSharedPtr predicted_traj_ptr_{nullptr}; + PredictedObjects::ConstSharedPtr predicted_objects_ptr_{nullptr}; + AutowareState::ConstSharedPtr autoware_state_{nullptr}; + + tf2_ros::Buffer tf_buffer_{get_clock()}; + tf2_ros::TransformListener tf_listener_{tf_buffer_}; + + // vehicle info + VehicleInfo vehicle_info_; + + // diag + Updater updater_{this}; + + // Member variables + bool publish_debug_pointcloud_; + bool publish_debug_markers_; + bool use_predicted_trajectory_; + bool use_imu_path_; + bool use_pointcloud_data_; + bool use_predicted_object_data_; + bool use_object_velocity_calculation_; + bool check_autoware_state_; + double path_footprint_extra_margin_; + double detection_range_min_height_; + double detection_range_max_height_margin_; + double voxel_grid_x_; + double voxel_grid_y_; + double voxel_grid_z_; + double min_generated_path_length_; + double expand_width_; + double longitudinal_offset_; + double t_response_; + double a_ego_min_; + double a_obj_min_; + double cluster_tolerance_; + double cluster_minimum_height_; + int minimum_cluster_size_; + int maximum_cluster_size_; + double imu_prediction_time_horizon_; + double imu_prediction_time_interval_; + double mpc_prediction_time_horizon_; + double mpc_prediction_time_interval_; + CollisionDataKeeper collision_data_keeper_; + // Parameter callback + OnSetParametersCallbackHandle::SharedPtr set_param_res_; +}; +} // namespace autoware::motion::control::autonomous_emergency_braking + +#endif // AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__NODE_HPP_ diff --git a/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/utils.hpp b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/utils.hpp new file mode 100644 index 0000000000000..30227560a0b0a --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/include/autoware/autonomous_emergency_braking/utils.hpp @@ -0,0 +1,85 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__UTILS_HPP_ +#define AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__UTILS_HPP_ + +#include + +#include +#include +#include + +#include + +#include +#ifdef ROS_DISTRO_GALACTIC +#include +#include +#else +#include + +#include +#endif + +namespace autoware::motion::control::autonomous_emergency_braking::utils +{ +using autoware_auto_perception_msgs::msg::PredictedObject; +using autoware_auto_perception_msgs::msg::PredictedObjects; +using geometry_msgs::msg::Point; +using geometry_msgs::msg::Pose; +using geometry_msgs::msg::TransformStamped; +using tier4_autoware_utils::Polygon2d; + +/** + * @brief Apply a transform to a predicted object + * @param input the predicted object + * @param transform_stamped the tf2 transform + */ +PredictedObject transformObjectFrame( + const PredictedObject & input, geometry_msgs::msg::TransformStamped & transform_stamped); + +/** + * @brief Get the predicted objects polygon as a geometry polygon + * @param current_pose the predicted object's pose + * @param obj_shape the object's shape + */ +Polygon2d convertPolygonObjectToGeometryPolygon( + const Pose & current_pose, const autoware_auto_perception_msgs::msg::Shape & obj_shape); + +/** + * @brief Get the predicted objects cylindrical shape as a geometry polygon + * @param current_pose the predicted object's pose + * @param obj_shape the object's shape + */ +Polygon2d convertCylindricalObjectToGeometryPolygon( + const Pose & current_pose, const autoware_auto_perception_msgs::msg::Shape & obj_shape); + +/** + * @brief Get the predicted objects bounding box shape as a geometry polygon + * @param current_pose the predicted object's pose + * @param obj_shape the object's shape + */ +Polygon2d convertBoundingBoxObjectToGeometryPolygon( + const Pose & current_pose, const double & base_to_front, const double & base_to_rear, + const double & base_to_width); + +/** + * @brief Get the predicted object's shape as a geometry polygon + * @param obj the object + */ +Polygon2d convertObjToPolygon(const PredictedObject & obj); +} // namespace autoware::motion::control::autonomous_emergency_braking::utils + +#endif // AUTOWARE__AUTONOMOUS_EMERGENCY_BRAKING__UTILS_HPP_ diff --git a/control/autonomous_emergency_braking/launch/autonomous_emergency_braking.launch.xml b/control/autoware_autonomous_emergency_braking/launch/autoware_autonomous_emergency_braking.launch.xml similarity index 64% rename from control/autonomous_emergency_braking/launch/autonomous_emergency_braking.launch.xml rename to control/autoware_autonomous_emergency_braking/launch/autoware_autonomous_emergency_braking.launch.xml index cf6640ec6be52..34d0492799577 100644 --- a/control/autonomous_emergency_braking/launch/autonomous_emergency_braking.launch.xml +++ b/control/autoware_autonomous_emergency_braking/launch/autoware_autonomous_emergency_braking.launch.xml @@ -1,12 +1,12 @@ - + - + - + @@ -15,5 +15,6 @@ + diff --git a/control/autonomous_emergency_braking/package.xml b/control/autoware_autonomous_emergency_braking/package.xml similarity index 62% rename from control/autonomous_emergency_braking/package.xml rename to control/autoware_autonomous_emergency_braking/package.xml index f68c16798ca79..364a9dc66322b 100644 --- a/control/autonomous_emergency_braking/package.xml +++ b/control/autoware_autonomous_emergency_braking/package.xml @@ -1,17 +1,26 @@ - autonomous_emergency_braking + autoware_autonomous_emergency_braking 0.1.0 - Autonomous Emergency Braking package as a ROS2 node - yutaka + Autonomous Emergency Braking 
package as a ROS 2 node + Takamasa Horibe + Tomoya Kimura + Mamoru Sobue + Daniel Sanchez + Kyoichi Sugahara + Apache License 2.0 ament_cmake - - autoware_cmake + autoware_cmake + ament_cmake_ros + ament_lint_auto + autoware_lint_common + autoware_test_utils autoware_auto_control_msgs + autoware_auto_perception_msgs autoware_auto_system_msgs autoware_auto_vehicle_msgs diagnostic_updater @@ -32,9 +41,6 @@ vehicle_info_util visualization_msgs - ament_lint_auto - autoware_lint_common - ament_cmake diff --git a/control/autoware_autonomous_emergency_braking/src/node.cpp b/control/autoware_autonomous_emergency_braking/src/node.cpp new file mode 100644 index 0000000000000..52b4d22f2e40d --- /dev/null +++ b/control/autoware_autonomous_emergency_braking/src/node.cpp @@ -0,0 +1,951 @@ +// Copyright 2022 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#ifdef ROS_DISTRO_GALACTIC +#include +#include +#else +#include + +#include +#endif + +namespace autoware::motion::control::autonomous_emergency_braking +{ +using autoware::motion::control::autonomous_emergency_braking::utils::convertObjToPolygon; +using diagnostic_msgs::msg::DiagnosticStatus; +using tier4_autoware_utils::Point2d; +namespace bg = boost::geometry; + +void appendPointToPolygon(Polygon2d & polygon, const geometry_msgs::msg::Point & geom_point) +{ + Point2d point; + point.x() = geom_point.x; + point.y() = geom_point.y; + + bg::append(polygon.outer(), point); +} + +Polygon2d createPolygon( + const geometry_msgs::msg::Pose & base_pose, const geometry_msgs::msg::Pose & next_pose, + const vehicle_info_util::VehicleInfo & vehicle_info, const double expand_width) +{ + Polygon2d polygon; + + const double longitudinal_offset = vehicle_info.max_longitudinal_offset_m; + const double width = vehicle_info.vehicle_width_m / 2.0 + expand_width; + const double rear_overhang = vehicle_info.rear_overhang_m; + + appendPointToPolygon( + polygon, + tier4_autoware_utils::calcOffsetPose(base_pose, longitudinal_offset, width, 0.0).position); + appendPointToPolygon( + polygon, + tier4_autoware_utils::calcOffsetPose(base_pose, longitudinal_offset, -width, 0.0).position); + appendPointToPolygon( + polygon, tier4_autoware_utils::calcOffsetPose(base_pose, -rear_overhang, -width, 0.0).position); + appendPointToPolygon( + polygon, tier4_autoware_utils::calcOffsetPose(base_pose, -rear_overhang, width, 0.0).position); + + appendPointToPolygon( + polygon, + tier4_autoware_utils::calcOffsetPose(next_pose, longitudinal_offset, width, 0.0).position); + appendPointToPolygon( + polygon, + tier4_autoware_utils::calcOffsetPose(next_pose, longitudinal_offset, -width, 0.0).position); + appendPointToPolygon( + polygon, tier4_autoware_utils::calcOffsetPose(next_pose, -rear_overhang, -width, 0.0).position); + 
appendPointToPolygon( + polygon, tier4_autoware_utils::calcOffsetPose(next_pose, -rear_overhang, width, 0.0).position); + + polygon = tier4_autoware_utils::isClockwise(polygon) + ? polygon + : tier4_autoware_utils::inverseClockwise(polygon); + + Polygon2d hull_polygon; + bg::convex_hull(polygon, hull_polygon); + bg::correct(hull_polygon); + return hull_polygon; +} + +AEB::AEB(const rclcpp::NodeOptions & node_options) +: Node("AEB", node_options), + vehicle_info_(vehicle_info_util::VehicleInfoUtil(*this).getVehicleInfo()), + collision_data_keeper_(this->get_clock()) +{ + // Publisher + { + pub_obstacle_pointcloud_ = + this->create_publisher("~/debug/obstacle_pointcloud", 1); + debug_marker_publisher_ = this->create_publisher("~/debug/markers", 1); + info_marker_publisher_ = this->create_publisher("~/info/markers", 1); + } + // Diagnostics + { + updater_.setHardwareID("autonomous_emergency_braking"); + updater_.add("aeb_emergency_stop", this, &AEB::onCheckCollision); + } + // parameter + publish_debug_pointcloud_ = declare_parameter("publish_debug_pointcloud"); + publish_debug_markers_ = declare_parameter("publish_debug_markers"); + use_predicted_trajectory_ = declare_parameter("use_predicted_trajectory"); + use_imu_path_ = declare_parameter("use_imu_path"); + use_pointcloud_data_ = declare_parameter("use_pointcloud_data"); + use_predicted_object_data_ = declare_parameter("use_predicted_object_data"); + use_object_velocity_calculation_ = declare_parameter("use_object_velocity_calculation"); + check_autoware_state_ = declare_parameter("check_autoware_state"); + path_footprint_extra_margin_ = declare_parameter("path_footprint_extra_margin"); + detection_range_min_height_ = declare_parameter("detection_range_min_height"); + detection_range_max_height_margin_ = + declare_parameter("detection_range_max_height_margin"); + voxel_grid_x_ = declare_parameter("voxel_grid_x"); + voxel_grid_y_ = declare_parameter("voxel_grid_y"); + voxel_grid_z_ = declare_parameter("voxel_grid_z"); + min_generated_path_length_ = declare_parameter("min_generated_path_length"); + expand_width_ = declare_parameter("expand_width"); + longitudinal_offset_ = declare_parameter("longitudinal_offset"); + t_response_ = declare_parameter("t_response"); + a_ego_min_ = declare_parameter("a_ego_min"); + a_obj_min_ = declare_parameter("a_obj_min"); + + cluster_tolerance_ = declare_parameter("cluster_tolerance"); + cluster_minimum_height_ = declare_parameter("cluster_minimum_height"); + minimum_cluster_size_ = declare_parameter("minimum_cluster_size"); + maximum_cluster_size_ = declare_parameter("maximum_cluster_size"); + + imu_prediction_time_horizon_ = declare_parameter("imu_prediction_time_horizon"); + imu_prediction_time_interval_ = declare_parameter("imu_prediction_time_interval"); + mpc_prediction_time_horizon_ = declare_parameter("mpc_prediction_time_horizon"); + mpc_prediction_time_interval_ = declare_parameter("mpc_prediction_time_interval"); + + { // Object history data keeper setup + const auto previous_obstacle_keep_time = + declare_parameter("previous_obstacle_keep_time"); + const auto collision_keeping_sec = declare_parameter("collision_keeping_sec"); + collision_data_keeper_.setTimeout(collision_keeping_sec, previous_obstacle_keep_time); + } + + // Parameter Callback + set_param_res_ = + add_on_set_parameters_callback(std::bind(&AEB::onParameter, this, std::placeholders::_1)); + + // start time + const double aeb_hz = declare_parameter("aeb_hz"); + const auto period_ns = rclcpp::Rate(aeb_hz).period(); + timer_ = 
rclcpp::create_timer(this, this->get_clock(), period_ns, std::bind(&AEB::onTimer, this)); +} + +rcl_interfaces::msg::SetParametersResult AEB::onParameter( + const std::vector & parameters) +{ + using tier4_autoware_utils::updateParam; + updateParam(parameters, "publish_debug_pointcloud", publish_debug_pointcloud_); + updateParam(parameters, "publish_debug_markers", publish_debug_markers_); + updateParam(parameters, "use_predicted_trajectory", use_predicted_trajectory_); + updateParam(parameters, "use_imu_path", use_imu_path_); + updateParam(parameters, "use_pointcloud_data", use_pointcloud_data_); + updateParam(parameters, "use_predicted_object_data", use_predicted_object_data_); + updateParam( + parameters, "use_object_velocity_calculation", use_object_velocity_calculation_); + updateParam(parameters, "check_autoware_state", check_autoware_state_); + updateParam(parameters, "path_footprint_extra_margin", path_footprint_extra_margin_); + updateParam(parameters, "detection_range_min_height", detection_range_min_height_); + updateParam( + parameters, "detection_range_max_height_margin", detection_range_max_height_margin_); + updateParam(parameters, "voxel_grid_x", voxel_grid_x_); + updateParam(parameters, "voxel_grid_y", voxel_grid_y_); + updateParam(parameters, "voxel_grid_z", voxel_grid_z_); + updateParam(parameters, "min_generated_path_length", min_generated_path_length_); + updateParam(parameters, "expand_width", expand_width_); + updateParam(parameters, "longitudinal_offset", longitudinal_offset_); + updateParam(parameters, "t_response", t_response_); + updateParam(parameters, "a_ego_min", a_ego_min_); + updateParam(parameters, "a_obj_min", a_obj_min_); + + updateParam(parameters, "cluster_tolerance", cluster_tolerance_); + updateParam(parameters, "cluster_minimum_height", cluster_minimum_height_); + updateParam(parameters, "minimum_cluster_size", minimum_cluster_size_); + updateParam(parameters, "maximum_cluster_size", maximum_cluster_size_); + + updateParam(parameters, "imu_prediction_time_horizon", imu_prediction_time_horizon_); + updateParam(parameters, "imu_prediction_time_interval", imu_prediction_time_interval_); + updateParam(parameters, "mpc_prediction_time_horizon", mpc_prediction_time_horizon_); + updateParam(parameters, "mpc_prediction_time_interval", mpc_prediction_time_interval_); + + { // Object history data keeper setup + auto [previous_obstacle_keep_time, collision_keeping_sec] = collision_data_keeper_.getTimeout(); + updateParam(parameters, "previous_obstacle_keep_time", previous_obstacle_keep_time); + updateParam(parameters, "collision_keeping_sec", collision_keeping_sec); + collision_data_keeper_.setTimeout(collision_keeping_sec, previous_obstacle_keep_time); + } + + rcl_interfaces::msg::SetParametersResult result; + result.successful = true; + result.reason = "success"; + return result; +} + +void AEB::onTimer() { updater_.force_update(); } + +void AEB::onImu(const Imu::ConstSharedPtr input_msg) +{ + // transform imu + geometry_msgs::msg::TransformStamped transform_stamped{}; + try { + transform_stamped = tf_buffer_.lookupTransform( + "base_link", input_msg->header.frame_id, input_msg->header.stamp, + rclcpp::Duration::from_seconds(0.5)); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM( + get_logger(), + "[AEB] Failed to look up transform from base_link to" << input_msg->header.frame_id); + return; + } + + angular_velocity_ptr_ = std::make_shared(); + tf2::doTransform(input_msg->angular_velocity, *angular_velocity_ptr_, transform_stamped); +} + 
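+  // Editorial note: the base_link angular velocity stored by onImu() above is later used as
+  // curr_w in generateEgoPath(curr_v, curr_w) to forward-integrate the IMU-based prediction
+  // path when use_imu_path is enabled; the point cloud callback below likewise transforms
+  // incoming points into base_link before the height and voxel-grid filtering.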
+void AEB::onPointCloud(const PointCloud2::ConstSharedPtr input_msg) +{ + PointCloud::Ptr pointcloud_ptr(new PointCloud); + pcl::fromROSMsg(*input_msg, *pointcloud_ptr); + + if (input_msg->header.frame_id != "base_link") { + RCLCPP_ERROR_STREAM( + get_logger(), + "[AEB]: Input point cloud frame is not base_link and it is " << input_msg->header.frame_id); + // transform pointcloud + geometry_msgs::msg::TransformStamped transform_stamped{}; + try { + transform_stamped = tf_buffer_.lookupTransform( + "base_link", input_msg->header.frame_id, input_msg->header.stamp, + rclcpp::Duration::from_seconds(0.5)); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM( + get_logger(), + "[AEB] Failed to look up transform from base_link to" << input_msg->header.frame_id); + return; + } + + // transform by using eigen matrix + PointCloud2 transformed_points{}; + const Eigen::Matrix4f affine_matrix = + tf2::transformToEigen(transform_stamped.transform).matrix().cast(); + pcl_ros::transformPointCloud(affine_matrix, *input_msg, transformed_points); + pcl::fromROSMsg(transformed_points, *pointcloud_ptr); + } + + // apply z-axis filter for removing False Positive points + PointCloud::Ptr height_filtered_pointcloud_ptr(new PointCloud); + pcl::PassThrough height_filter; + height_filter.setInputCloud(pointcloud_ptr); + height_filter.setFilterFieldName("z"); + height_filter.setFilterLimits( + detection_range_min_height_, + vehicle_info_.vehicle_height_m + detection_range_max_height_margin_); + height_filter.filter(*height_filtered_pointcloud_ptr); + + pcl::VoxelGrid filter; + PointCloud::Ptr no_height_filtered_pointcloud_ptr(new PointCloud); + filter.setInputCloud(height_filtered_pointcloud_ptr); + filter.setLeafSize(voxel_grid_x_, voxel_grid_y_, voxel_grid_z_); + filter.filter(*no_height_filtered_pointcloud_ptr); + + obstacle_ros_pointcloud_ptr_ = std::make_shared(); + + pcl::toROSMsg(*no_height_filtered_pointcloud_ptr, *obstacle_ros_pointcloud_ptr_); + obstacle_ros_pointcloud_ptr_->header = input_msg->header; +} + +bool AEB::fetchLatestData() +{ + const auto missing = [this](const auto & name) { + RCLCPP_INFO_SKIPFIRST_THROTTLE(get_logger(), *get_clock(), 5000, "[AEB] waiting for %s", name); + return false; + }; + + current_velocity_ptr_ = sub_velocity_.takeData(); + if (!current_velocity_ptr_) { + return missing("ego velocity"); + } + + if (use_pointcloud_data_) { + const auto pointcloud_ptr = sub_point_cloud_.takeData(); + if (!pointcloud_ptr) { + return missing("object pointcloud message"); + } + + onPointCloud(pointcloud_ptr); + if (!obstacle_ros_pointcloud_ptr_) { + return missing("object pointcloud"); + } + } else { + obstacle_ros_pointcloud_ptr_.reset(); + } + + if (use_predicted_object_data_) { + predicted_objects_ptr_ = predicted_objects_sub_.takeData(); + if (!predicted_objects_ptr_) { + return missing("predicted objects"); + } + } else { + predicted_objects_ptr_.reset(); + } + + if (!obstacle_ros_pointcloud_ptr_ && !predicted_objects_ptr_) { + return missing("object detection method (pointcloud or predicted objects)"); + } + + const bool has_imu_path = std::invoke([&]() { + if (!use_imu_path_) return false; + const auto imu_ptr = sub_imu_.takeData(); + if (!imu_ptr) { + return missing("imu message"); + } + // imu_ptr is valid + onImu(imu_ptr); + return (!angular_velocity_ptr_) ? 
missing("imu") : true; + }); + + const bool has_predicted_path = std::invoke([&]() { + if (!use_predicted_trajectory_) { + return false; + } + predicted_traj_ptr_ = sub_predicted_traj_.takeData(); + return (!predicted_traj_ptr_) ? missing("control predicted trajectory") : true; + }); + + if (!has_imu_path && !has_predicted_path) { + RCLCPP_INFO_SKIPFIRST_THROTTLE( + get_logger(), *get_clock(), 5000, + "[AEB] At least one path (IMU or predicted trajectory) is required for operation"); + return false; + } + + autoware_state_ = sub_autoware_state_.takeData(); + if (check_autoware_state_ && !autoware_state_) { + return missing("autoware_state"); + } + + return true; +} + +void AEB::onCheckCollision(DiagnosticStatusWrapper & stat) +{ + MarkerArray debug_markers; + MarkerArray info_markers; + checkCollision(debug_markers); + + if (!collision_data_keeper_.checkCollisionExpired()) { + const std::string error_msg = "[AEB]: Emergency Brake"; + const auto diag_level = DiagnosticStatus::ERROR; + stat.summary(diag_level, error_msg); + const auto & data = collision_data_keeper_.get(); + if (data.has_value()) { + stat.addf("RSS", "%.2f", data.value().rss); + stat.addf("Distance", "%.2f", data.value().distance_to_object); + stat.addf("Object Speed", "%.2f", data.value().velocity); + if (publish_debug_markers_) { + addCollisionMarker(data.value(), debug_markers); + } + } + + addVirtualStopWallMarker(info_markers); + } else { + const std::string error_msg = "[AEB]: No Collision"; + const auto diag_level = DiagnosticStatus::OK; + stat.summary(diag_level, error_msg); + } + + // publish debug markers + debug_marker_publisher_->publish(debug_markers); + info_marker_publisher_->publish(info_markers); +} + +bool AEB::checkCollision(MarkerArray & debug_markers) +{ + using colorTuple = std::tuple; + + // step1. check data + if (!fetchLatestData()) { + return false; + } + + // if not driving, disable aeb + if (check_autoware_state_ && autoware_state_->state != AutowareState::DRIVING) { + return false; + } + + // step2. 
create velocity data check if the vehicle stops or not + constexpr double min_moving_velocity_th{0.1}; + const double current_v = current_velocity_ptr_->longitudinal_velocity; + if (std::abs(current_v) < min_moving_velocity_th) { + return false; + } + + auto check_collision = [&]( + const auto & path, const colorTuple & debug_colors, + const std::string & debug_ns, + pcl::PointCloud::Ptr filtered_objects) { + // Check which points of the cropped point cloud are on the ego path, and get the closest one + const auto ego_polys = generatePathFootprint(path, expand_width_); + auto objects = std::invoke([&]() { + std::vector objects; + // Crop out Pointcloud using an extra wide ego path + if (use_pointcloud_data_) { + const auto expanded_ego_polys = + generatePathFootprint(path, expand_width_ + path_footprint_extra_margin_); + cropPointCloudWithEgoFootprintPath(expanded_ego_polys, filtered_objects); + const auto current_time = obstacle_ros_pointcloud_ptr_->header.stamp; + createObjectDataUsingPointCloudClusters( + path, ego_polys, current_time, objects, filtered_objects); + } + if (use_predicted_object_data_) { + createObjectDataUsingPredictedObjects(path, ego_polys, objects); + } + return objects; + }); + + // Get only the closest object and calculate its speed + const auto closest_object_point = std::invoke([&]() -> std::optional { + const auto closest_object_point_itr = + std::min_element(objects.begin(), objects.end(), [](const auto & o1, const auto & o2) { + return o1.distance_to_object < o2.distance_to_object; + }); + + if (closest_object_point_itr == objects.end()) { + return std::nullopt; + } + const auto closest_object_speed = (use_object_velocity_calculation_) + ? collision_data_keeper_.calcObjectSpeedFromHistory( + *closest_object_point_itr, path, current_v) + : std::make_optional(0.0); + if (!closest_object_speed.has_value()) { + return std::nullopt; + } + closest_object_point_itr->velocity = closest_object_speed.value(); + return std::make_optional(*closest_object_point_itr); + }); + + const bool has_collision = (closest_object_point.has_value()) + ? hasCollision(current_v, closest_object_point.value()) + : false; + + // Add debug markers + if (publish_debug_markers_) { + const auto [color_r, color_g, color_b, color_a] = debug_colors; + addMarker( + this->get_clock()->now(), path, ego_polys, objects, collision_data_keeper_.get(), color_r, + color_g, color_b, color_a, debug_ns, debug_markers); + } + // check collision using rss distance + return has_collision; + }; + + // step3. make function to check collision with ego path created with sensor data + const auto has_collision_ego = [&](pcl::PointCloud::Ptr filtered_objects) -> bool { + if (!use_imu_path_ || !angular_velocity_ptr_) return false; + const double current_w = angular_velocity_ptr_->z; + constexpr colorTuple debug_color = {0.0 / 256.0, 148.0 / 256.0, 205.0 / 256.0, 0.999}; + const std::string ns = "ego"; + const auto ego_path = generateEgoPath(current_v, current_w); + + return check_collision(ego_path, debug_color, ns, filtered_objects); + }; + + // step4. 
make function to check collision with predicted trajectory from control module + const auto has_collision_predicted = + [&](pcl::PointCloud::Ptr filtered_objects) -> bool { + if (!use_predicted_trajectory_ || !predicted_traj_ptr_) return false; + const auto predicted_traj_ptr = predicted_traj_ptr_; + const auto predicted_path_opt = generateEgoPath(*predicted_traj_ptr); + + if (!predicted_path_opt) return false; + constexpr colorTuple debug_color = {0.0 / 256.0, 100.0 / 256.0, 0.0 / 256.0, 0.999}; + const std::string ns = "predicted"; + const auto & predicted_path = predicted_path_opt.value(); + + return check_collision(predicted_path, debug_color, ns, filtered_objects); + }; + + // Data of filtered point cloud + pcl::PointCloud::Ptr filtered_objects = + pcl::make_shared>(); + // evaluate if there is a collision for both paths + const bool has_collision = + has_collision_ego(filtered_objects) || has_collision_predicted(filtered_objects); + + // Debug print + if (!filtered_objects->empty() && publish_debug_pointcloud_) { + const auto filtered_objects_ros_pointcloud_ptr = std::make_shared(); + pcl::toROSMsg(*filtered_objects, *filtered_objects_ros_pointcloud_ptr); + pub_obstacle_pointcloud_->publish(*filtered_objects_ros_pointcloud_ptr); + } + return has_collision; +} + +bool AEB::hasCollision(const double current_v, const ObjectData & closest_object) +{ + const double & obj_v = closest_object.velocity; + const double & t = t_response_; + + const double rss_dist = std::invoke([&]() { + const double pre_braking_covered_distance = std::abs(current_v) * t; + const double braking_distance = (current_v * current_v) / (2 * std::fabs(a_ego_min_)); + const double ego_stopping_distance = pre_braking_covered_distance + braking_distance; + const double obj_braking_distance = (obj_v > 0.0) + ? 
-(obj_v * obj_v) / (2 * std::fabs(a_obj_min_)) + : (obj_v * obj_v) / (2 * std::fabs(a_obj_min_)); + return ego_stopping_distance + obj_braking_distance + longitudinal_offset_; + }); + + if (closest_object.distance_to_object < rss_dist) { + // collision happens + ObjectData collision_data = closest_object; + collision_data.rss = rss_dist; + collision_data.distance_to_object = closest_object.distance_to_object; + collision_data_keeper_.setCollisionData(collision_data); + return true; + } + return false; +} + +Path AEB::generateEgoPath(const double curr_v, const double curr_w) +{ + Path path; + double curr_x = 0.0; + double curr_y = 0.0; + double curr_yaw = 0.0; + geometry_msgs::msg::Pose ini_pose; + ini_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); + ini_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); + path.push_back(ini_pose); + + if (std::abs(curr_v) < 0.1) { + // if current velocity is too small, assume it stops at the same point + return path; + } + + constexpr double epsilon = std::numeric_limits::epsilon(); + const double & dt = imu_prediction_time_interval_; + const double & horizon = imu_prediction_time_horizon_; + for (double t = 0.0; t < horizon + epsilon; t += dt) { + curr_x = curr_x + curr_v * std::cos(curr_yaw) * dt; + curr_y = curr_y + curr_v * std::sin(curr_yaw) * dt; + curr_yaw = curr_yaw + curr_w * dt; + geometry_msgs::msg::Pose current_pose; + current_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); + current_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); + if (tier4_autoware_utils::calcDistance2d(path.back(), current_pose) < 1e-3) { + continue; + } + path.push_back(current_pose); + } + + // If path is shorter than minimum path length + while (tier4_autoware_utils::calcArcLength(path) < min_generated_path_length_) { + curr_x = curr_x + curr_v * std::cos(curr_yaw) * dt; + curr_y = curr_y + curr_v * std::sin(curr_yaw) * dt; + curr_yaw = curr_yaw + curr_w * dt; + geometry_msgs::msg::Pose current_pose; + current_pose.position = tier4_autoware_utils::createPoint(curr_x, curr_y, 0.0); + current_pose.orientation = tier4_autoware_utils::createQuaternionFromYaw(curr_yaw); + if (tier4_autoware_utils::calcDistance2d(path.back(), current_pose) < 1e-3) { + continue; + } + path.push_back(current_pose); + } + return path; +} + +std::optional AEB::generateEgoPath(const Trajectory & predicted_traj) +{ + if (predicted_traj.points.empty()) { + return std::nullopt; + } + + geometry_msgs::msg::TransformStamped transform_stamped{}; + try { + transform_stamped = tf_buffer_.lookupTransform( + "base_link", predicted_traj.header.frame_id, predicted_traj.header.stamp, + rclcpp::Duration::from_seconds(0.5)); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM(get_logger(), "[AEB] Failed to look up transform from base_link to map"); + return std::nullopt; + } + + // create path + Path path; + path.reserve(predicted_traj.points.size()); + for (size_t i = 0; i < predicted_traj.points.size(); ++i) { + geometry_msgs::msg::Pose map_pose; + tf2::doTransform(predicted_traj.points.at(i).pose, map_pose, transform_stamped); + path.push_back(map_pose); + + if (i * mpc_prediction_time_interval_ > mpc_prediction_time_horizon_) { + break; + } + } + return path; +} + +std::vector AEB::generatePathFootprint( + const Path & path, const double extra_width_margin) +{ + std::vector polygons; + for (size_t i = 0; i < path.size() - 1; ++i) { + polygons.push_back( + createPolygon(path.at(i), path.at(i + 1), 
vehicle_info_, extra_width_margin)); + } + return polygons; +} + +void AEB::createObjectDataUsingPredictedObjects( + const Path & ego_path, const std::vector & ego_polys, + std::vector & object_data_vector) +{ + if (predicted_objects_ptr_->objects.empty()) return; + + const double current_ego_speed = current_velocity_ptr_->longitudinal_velocity; + const auto & objects = predicted_objects_ptr_->objects; + const auto & stamp = predicted_objects_ptr_->header.stamp; + + // Ego position + const auto current_p = [&]() { + const auto & first_point_of_path = ego_path.front(); + const auto & p = first_point_of_path.position; + return tier4_autoware_utils::createPoint(p.x, p.y, p.z); + }(); + + auto get_object_tangent_velocity = + [&](const PredictedObject & predicted_object, const auto & obj_pose) { + const double obj_vel_norm = std::hypot( + predicted_object.kinematics.initial_twist_with_covariance.twist.linear.x, + predicted_object.kinematics.initial_twist_with_covariance.twist.linear.y); + + const auto obj_yaw = tf2::getYaw(obj_pose.orientation); + const auto obj_idx = tier4_autoware_utils::findNearestIndex(ego_path, obj_pose.position); + const auto path_yaw = (current_ego_speed > 0.0) + ? tf2::getYaw(ego_path.at(obj_idx).orientation) + : tf2::getYaw(ego_path.at(obj_idx).orientation) + M_PI; + return obj_vel_norm * std::cos(obj_yaw - path_yaw); + }; + + geometry_msgs::msg::TransformStamped transform_stamped{}; + try { + transform_stamped = tf_buffer_.lookupTransform( + "base_link", predicted_objects_ptr_->header.frame_id, stamp, + rclcpp::Duration::from_seconds(0.5)); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM(get_logger(), "[AEB] Failed to look up transform from base_link to map"); + return; + } + + // Check which objects collide with the ego footprints + std::for_each(objects.begin(), objects.end(), [&](const auto & predicted_object) { + // get objects in base_link frame + const auto t_predicted_object = + utils::transformObjectFrame(predicted_object, transform_stamped); + const auto & obj_pose = t_predicted_object.kinematics.initial_pose_with_covariance.pose; + const auto obj_poly = convertObjToPolygon(t_predicted_object); + const double obj_tangent_velocity = get_object_tangent_velocity(t_predicted_object, obj_pose); + + for (const auto & ego_poly : ego_polys) { + // check collision with 2d polygon + std::vector collision_points_bg; + bg::intersection(ego_poly, obj_poly, collision_points_bg); + if (collision_points_bg.empty()) continue; + + // Create an object for each intersection point + bool collision_points_added{false}; + for (const auto & collision_point : collision_points_bg) { + const auto obj_position = + tier4_autoware_utils::createPoint(collision_point.x(), collision_point.y(), 0.0); + const double obj_arc_length = + tier4_autoware_utils::calcSignedArcLength(ego_path, current_p, obj_position); + if (std::isnan(obj_arc_length)) continue; + + // If the object is behind the ego, we need to use the backward long offset. The + // distance should be a positive number in any case + const bool is_object_in_front_of_ego = obj_arc_length > 0.0; + const double dist_ego_to_object = + (is_object_in_front_of_ego) ? 
obj_arc_length - vehicle_info_.max_longitudinal_offset_m + : obj_arc_length + vehicle_info_.min_longitudinal_offset_m; + + ObjectData obj; + obj.stamp = stamp; + obj.position = obj_position; + obj.velocity = obj_tangent_velocity; + obj.distance_to_object = std::abs(dist_ego_to_object); + object_data_vector.push_back(obj); + collision_points_added = true; + } + // The ego polygons are in order, so the first intersection points found are the closest + // points. It is not necessary to continue iterating the ego polys for the same object. + if (collision_points_added) break; + } + }); +} + +void AEB::createObjectDataUsingPointCloudClusters( + const Path & ego_path, const std::vector & ego_polys, const rclcpp::Time & stamp, + std::vector & objects, const pcl::PointCloud::Ptr obstacle_points_ptr) +{ + // check if the predicted path has valid number of points + if (ego_path.size() < 2 || ego_polys.empty() || obstacle_points_ptr->empty()) { + return; + } + + // eliminate noisy points by only considering points belonging to clusters of at least a certain + // size + const std::vector cluster_indices = std::invoke([&]() { + std::vector cluster_idx; + pcl::search::KdTree::Ptr tree(new pcl::search::KdTree); + tree->setInputCloud(obstacle_points_ptr); + pcl::EuclideanClusterExtraction ec; + ec.setClusterTolerance(cluster_tolerance_); + ec.setMinClusterSize(minimum_cluster_size_); + ec.setMaxClusterSize(maximum_cluster_size_); + ec.setSearchMethod(tree); + ec.setInputCloud(obstacle_points_ptr); + ec.extract(cluster_idx); + return cluster_idx; + }); + + PointCloud::Ptr points_belonging_to_cluster_hulls(new PointCloud); + for (const auto & indices : cluster_indices) { + PointCloud::Ptr cluster(new PointCloud); + bool cluster_surpasses_threshold_height{false}; + for (const auto & index : indices.indices) { + const auto & p = (*obstacle_points_ptr)[index]; + cluster_surpasses_threshold_height = (cluster_surpasses_threshold_height) + ? cluster_surpasses_threshold_height + : (p.z > cluster_minimum_height_); + cluster->push_back(p); + } + if (!cluster_surpasses_threshold_height) continue; + // Make a 2d convex hull for the objects + pcl::ConvexHull hull; + hull.setDimension(2); + hull.setInputCloud(cluster); + std::vector polygons; + PointCloud::Ptr surface_hull(new pcl::PointCloud); + hull.reconstruct(*surface_hull, polygons); + for (const auto & p : *surface_hull) { + points_belonging_to_cluster_hulls->push_back(p); + } + } + + // select points inside the ego footprint path + const auto current_p = [&]() { + const auto & first_point_of_path = ego_path.front(); + const auto & p = first_point_of_path.position; + return tier4_autoware_utils::createPoint(p.x, p.y, p.z); + }(); + + for (const auto & p : *points_belonging_to_cluster_hulls) { + const auto obj_position = tier4_autoware_utils::createPoint(p.x, p.y, p.z); + const double obj_arc_length = + tier4_autoware_utils::calcSignedArcLength(ego_path, current_p, obj_position); + if (std::isnan(obj_arc_length)) continue; + + // If the object is behind the ego, we need to use the backward long offset. The distance should + // be a positive number in any case + const bool is_object_in_front_of_ego = obj_arc_length > 0.0; + const double dist_ego_to_object = (is_object_in_front_of_ego) + ? 
+  // select points inside the ego footprint path
+  const auto current_p = [&]() {
+    const auto & first_point_of_path = ego_path.front();
+    const auto & p = first_point_of_path.position;
+    return tier4_autoware_utils::createPoint(p.x, p.y, p.z);
+  }();
+
+  for (const auto & p : *points_belonging_to_cluster_hulls) {
+    const auto obj_position = tier4_autoware_utils::createPoint(p.x, p.y, p.z);
+    const double obj_arc_length =
+      tier4_autoware_utils::calcSignedArcLength(ego_path, current_p, obj_position);
+    if (std::isnan(obj_arc_length)) continue;
+
+    // If the object is behind the ego, we need to use the backward longitudinal offset.
+    // The distance should be a positive number in any case
+    const bool is_object_in_front_of_ego = obj_arc_length > 0.0;
+    const double dist_ego_to_object =
+      (is_object_in_front_of_ego) ? obj_arc_length - vehicle_info_.max_longitudinal_offset_m
+                                  : obj_arc_length + vehicle_info_.min_longitudinal_offset_m;
+
+    ObjectData obj;
+    obj.stamp = stamp;
+    obj.position = obj_position;
+    obj.velocity = 0.0;
+    obj.distance_to_object = std::abs(dist_ego_to_object);
+
+    const Point2d obj_point(p.x, p.y);
+    for (const auto & ego_poly : ego_polys) {
+      if (bg::within(obj_point, ego_poly)) {
+        objects.push_back(obj);
+        break;
+      }
+    }
+  }
+}
+
+void AEB::cropPointCloudWithEgoFootprintPath(
+  const std::vector<Polygon2d> & ego_polys, pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_objects)
+{
+  PointCloud::Ptr full_points_ptr(new PointCloud);
+  pcl::fromROSMsg(*obstacle_ros_pointcloud_ptr_, *full_points_ptr);
+  // Create a Point cloud with the points of the ego footprint
+  PointCloud::Ptr path_polygon_points(new PointCloud);
+  std::for_each(ego_polys.begin(), ego_polys.end(), [&](const auto & poly) {
+    std::for_each(poly.outer().begin(), poly.outer().end(), [&](const auto & p) {
+      pcl::PointXYZ point(p.x(), p.y(), 0.0);
+      path_polygon_points->push_back(point);
+    });
+  });
+  // Make a surface hull with the ego footprint to filter out points
+  pcl::ConvexHull<pcl::PointXYZ> hull;
+  hull.setDimension(2);
+  hull.setInputCloud(path_polygon_points);
+  std::vector<pcl::Vertices> polygons;
+  pcl::PointCloud<pcl::PointXYZ>::Ptr surface_hull(new pcl::PointCloud<pcl::PointXYZ>);
+  hull.reconstruct(*surface_hull, polygons);
+  // Filter out points outside of the path's convex hull
+  pcl::CropHull<pcl::PointXYZ> path_polygon_hull_filter;
+  path_polygon_hull_filter.setDim(2);
+  path_polygon_hull_filter.setInputCloud(full_points_ptr);
+  path_polygon_hull_filter.setHullIndices(polygons);
+  path_polygon_hull_filter.setHullCloud(surface_hull);
+  path_polygon_hull_filter.filter(*filtered_objects);
+  filtered_objects->header.stamp = full_points_ptr->header.stamp;
+}
+
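+// Note on cropPointCloudWithEgoFootprintPath(): the hull given to the CropHull filter is the
+// convex hull of all footprint polygons combined, so it acts as a coarse pre-filter; points that
+// lie between individual footprint polygons can survive it and are rejected later by the
+// per-polygon within() checks in createObjectDataUsingPointCloudClusters().
+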
+void AEB::addMarker(
+  const rclcpp::Time & current_time, const Path & path, const std::vector<Polygon2d> & polygons,
+  const std::vector<ObjectData> & objects, const std::optional<ObjectData> & closest_object,
+  const double color_r, const double color_g, const double color_b, const double color_a,
+  const std::string & ns, MarkerArray & debug_markers)
+{
+  auto path_marker = tier4_autoware_utils::createDefaultMarker(
+    "base_link", current_time, ns + "_path", 0L, Marker::LINE_STRIP,
+    tier4_autoware_utils::createMarkerScale(0.2, 0.2, 0.2),
+    tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a));
+  path_marker.points.resize(path.size());
+  for (size_t i = 0; i < path.size(); ++i) {
+    path_marker.points.at(i) = path.at(i).position;
+  }
+  debug_markers.markers.push_back(path_marker);
+
+  auto polygon_marker = tier4_autoware_utils::createDefaultMarker(
+    "base_link", current_time, ns + "_polygon", 0, Marker::LINE_LIST,
+    tier4_autoware_utils::createMarkerScale(0.03, 0.0, 0.0),
+    tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a));
+  for (const auto & poly : polygons) {
+    for (size_t dp_idx = 0; dp_idx < poly.outer().size(); ++dp_idx) {
+      const auto & boost_cp = poly.outer().at(dp_idx);
+      const auto & boost_np = poly.outer().at((dp_idx + 1) % poly.outer().size());
+      const auto curr_point = tier4_autoware_utils::createPoint(boost_cp.x(), boost_cp.y(), 0.0);
+      const auto next_point = tier4_autoware_utils::createPoint(boost_np.x(), boost_np.y(), 0.0);
+      polygon_marker.points.push_back(curr_point);
+      polygon_marker.points.push_back(next_point);
+    }
+  }
+  debug_markers.markers.push_back(polygon_marker);
+
+  auto object_data_marker = tier4_autoware_utils::createDefaultMarker(
+    "base_link", current_time, ns + "_objects", 0, Marker::SPHERE_LIST,
+    tier4_autoware_utils::createMarkerScale(0.5, 0.5, 0.5),
+    tier4_autoware_utils::createMarkerColor(color_r, color_g, color_b, color_a));
+  for (const auto & e : objects) {
+    object_data_marker.points.push_back(e.position);
+  }
+  debug_markers.markers.push_back(object_data_marker);
+
+  // Visualize the closest object's velocity and distance as a text marker
+  if (closest_object.has_value()) {
+    const auto & obj = closest_object.value();
+    const auto color = tier4_autoware_utils::createMarkerColor(0.95, 0.95, 0.95, 0.999);
+    auto closest_object_velocity_marker_array = tier4_autoware_utils::createDefaultMarker(
+      "base_link", obj.stamp, ns + "_closest_object_velocity", 0,
+      visualization_msgs::msg::Marker::TEXT_VIEW_FACING,
+      tier4_autoware_utils::createMarkerScale(0.0, 0.0, 0.7), color);
+    closest_object_velocity_marker_array.pose.position = obj.position;
+    const auto ego_velocity = current_velocity_ptr_->longitudinal_velocity;
+    closest_object_velocity_marker_array.text =
+      "Object velocity: " + std::to_string(obj.velocity) + " [m/s]\n";
+    closest_object_velocity_marker_array.text +=
+      "Object relative velocity to ego: " + std::to_string(obj.velocity - std::abs(ego_velocity)) +
+      " [m/s]\n";
+    closest_object_velocity_marker_array.text +=
+      "Object distance to ego: " + std::to_string(obj.distance_to_object) + " [m]\n";
+    closest_object_velocity_marker_array.text +=
+      "RSS distance: " + std::to_string(obj.rss) + " [m]";
+    debug_markers.markers.push_back(closest_object_velocity_marker_array);
+  }
+}
+
+void AEB::addVirtualStopWallMarker(MarkerArray & markers)
+{
+  const auto ego_map_pose = std::invoke([this]() -> std::optional<geometry_msgs::msg::Pose> {
+    geometry_msgs::msg::TransformStamped tf_current_pose;
+    geometry_msgs::msg::Pose p;
+    try {
+      tf_current_pose = tf_buffer_.lookupTransform(
+        "map", "base_link", rclcpp::Time(0), rclcpp::Duration::from_seconds(1.0));
+    } catch (tf2::TransformException & ex) {
+      RCLCPP_ERROR(get_logger(), "%s", ex.what());
+      return std::nullopt;
+    }
+
+    p.orientation = tf_current_pose.transform.rotation;
+    p.position.x = tf_current_pose.transform.translation.x;
+    p.position.y = tf_current_pose.transform.translation.y;
+    p.position.z = tf_current_pose.transform.translation.z;
+    return std::make_optional(p);
+  });
+
+  if (ego_map_pose.has_value()) {
+    const double base_to_front_offset = vehicle_info_.max_longitudinal_offset_m;
+    const auto ego_front_pose = tier4_autoware_utils::calcOffsetPose(
+      ego_map_pose.value(), base_to_front_offset, 0.0, 0.0, 0.0);
+    const auto virtual_stop_wall = tier4_autoware_utils::createStopVirtualWallMarker(
+      ego_front_pose, "autonomous_emergency_braking", this->now(), 0);
+    tier4_autoware_utils::appendMarkerArray(virtual_stop_wall, &markers);
+  }
+}
+
+void AEB::addCollisionMarker(const ObjectData & data, MarkerArray & debug_markers)
+{
+  auto point_marker = tier4_autoware_utils::createDefaultMarker(
+    "base_link", data.stamp, "collision_point", 0, Marker::SPHERE,
+    tier4_autoware_utils::createMarkerScale(0.3, 0.3, 0.3),
+    tier4_autoware_utils::createMarkerColor(1.0, 0.0, 0.0, 0.3));
+  point_marker.pose.position = data.position;
+  debug_markers.markers.push_back(point_marker);
+}
+
+}  // namespace autoware::motion::control::autonomous_emergency_braking
+
+#include <rclcpp_components/register_node_macro.hpp>
+RCLCPP_COMPONENTS_REGISTER_NODE(autoware::motion::control::autonomous_emergency_braking::AEB)
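+// Note: RCLCPP_COMPONENTS_REGISTER_NODE registers AEB as an rclcpp component, so the node can be
+// loaded into a component container at runtime (see the ComposableNode entry for this plugin in
+// the tier4_control_launch change below).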
diff --git a/control/autoware_autonomous_emergency_braking/src/utils.cpp b/control/autoware_autonomous_emergency_braking/src/utils.cpp
new file mode 100644
index 0000000000000..4524458c1d955
--- /dev/null
+++ b/control/autoware_autonomous_emergency_braking/src/utils.cpp
@@ -0,0 +1,152 @@
+// Copyright 2024 TIER IV, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include
+
+namespace autoware::motion::control::autonomous_emergency_braking::utils
+{
+using autoware_auto_perception_msgs::msg::PredictedObject;
+using autoware_auto_perception_msgs::msg::PredictedObjects;
+using geometry_msgs::msg::Point;
+using geometry_msgs::msg::Pose;
+using geometry_msgs::msg::TransformStamped;
+using geometry_msgs::msg::Vector3;
+using tier4_autoware_utils::Polygon2d;
+
+PredictedObject transformObjectFrame(
+  const PredictedObject & input, geometry_msgs::msg::TransformStamped & transform_stamped)
+{
+  PredictedObject output = input;
+  const auto & linear_twist = input.kinematics.initial_twist_with_covariance.twist.linear;
+  const auto & angular_twist = input.kinematics.initial_twist_with_covariance.twist.angular;
+  const auto & pose = input.kinematics.initial_pose_with_covariance.pose;
+
+  geometry_msgs::msg::Pose t_pose;
+  Vector3 t_linear_twist;
+  Vector3 t_angular_twist;
+
+  tf2::doTransform(pose, t_pose, transform_stamped);
+  tf2::doTransform(linear_twist, t_linear_twist, transform_stamped);
+  tf2::doTransform(angular_twist, t_angular_twist, transform_stamped);
+
+  output.kinematics.initial_pose_with_covariance.pose = t_pose;
+  output.kinematics.initial_twist_with_covariance.twist.linear = t_linear_twist;
+  output.kinematics.initial_twist_with_covariance.twist.angular = t_angular_twist;
+  return output;
+}
+
+Polygon2d convertPolygonObjectToGeometryPolygon(
+  const Pose & current_pose, const autoware_auto_perception_msgs::msg::Shape & obj_shape)
+{
+  if (obj_shape.footprint.points.empty()) {
+    return {};
+  }
+  Polygon2d object_polygon;
+  tf2::Transform tf_map2obj;
+  fromMsg(current_pose, tf_map2obj);
+  const auto obj_points = obj_shape.footprint.points;
+  object_polygon.outer().reserve(obj_points.size() + 1);
+  for (const auto & obj_point : obj_points) {
+    tf2::Vector3 obj(obj_point.x, obj_point.y, obj_point.z);
+    tf2::Vector3 tf_obj = tf_map2obj * obj;
+    object_polygon.outer().emplace_back(tf_obj.x(), tf_obj.y());
+  }
+  object_polygon.outer().push_back(object_polygon.outer().front());
+  boost::geometry::correct(object_polygon);
+
+  return object_polygon;
+}
+
+Polygon2d convertCylindricalObjectToGeometryPolygon(
+  const Pose & current_pose, const autoware_auto_perception_msgs::msg::Shape & obj_shape)
+{
+  Polygon2d object_polygon;
+
+  const double obj_x = current_pose.position.x;
+  const double obj_y = current_pose.position.y;
+
+  constexpr int n = 20;
+  const double r = obj_shape.dimensions.x / 2;
+  object_polygon.outer().reserve(n + 1);
+  for (int i = 0; i < n; ++i) {
+    object_polygon.outer().emplace_back(
+      obj_x + r * std::cos(2.0 * M_PI / n * i), obj_y + r * std::sin(2.0 * M_PI / n * i));
+  }
+
+  object_polygon.outer().push_back(object_polygon.outer().front());
+  boost::geometry::correct(object_polygon);
+
+  return object_polygon;
+}
+
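+// Note on the cylinder approximation above: the circle of radius obj_shape.dimensions.x / 2 is
+// sampled with 20 vertices, so the polygon is inscribed in the true footprint and slightly
+// under-approximates it (by roughly 1 - cos(pi / 20), i.e. about 1.2 %, at the edge midpoints).
+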
+Polygon2d convertBoundingBoxObjectToGeometryPolygon(
+  const Pose & current_pose, const double & base_to_front, const double & base_to_rear,
+  const double & base_to_width)
+{
+  const auto mapped_point = [](const double & length_scalar, const double & width_scalar) {
+    tf2::Vector3 map;
+    map.setX(length_scalar);
+    map.setY(width_scalar);
+    map.setZ(0.0);
+    map.setW(1.0);
+    return map;
+  };
+
+  // set the corner points in the object's local coordinate frame
+  const tf2::Vector3 p1_map = std::invoke(mapped_point, base_to_front, -base_to_width);
+  const tf2::Vector3 p2_map = std::invoke(mapped_point, base_to_front, base_to_width);
+  const tf2::Vector3 p3_map = std::invoke(mapped_point, -base_to_rear, base_to_width);
+  const tf2::Vector3 p4_map = std::invoke(mapped_point, -base_to_rear, -base_to_width);
+
+  // transform the corner points from the object frame into the frame of current_pose
+  tf2::Transform tf_map2obj;
+  tf2::fromMsg(current_pose, tf_map2obj);
+  const tf2::Vector3 p1_obj = tf_map2obj * p1_map;
+  const tf2::Vector3 p2_obj = tf_map2obj * p2_map;
+  const tf2::Vector3 p3_obj = tf_map2obj * p3_map;
+  const tf2::Vector3 p4_obj = tf_map2obj * p4_map;
+
+  Polygon2d object_polygon;
+  object_polygon.outer().reserve(5);
+  object_polygon.outer().emplace_back(p1_obj.x(), p1_obj.y());
+  object_polygon.outer().emplace_back(p2_obj.x(), p2_obj.y());
+  object_polygon.outer().emplace_back(p3_obj.x(), p3_obj.y());
+  object_polygon.outer().emplace_back(p4_obj.x(), p4_obj.y());
+
+  object_polygon.outer().push_back(object_polygon.outer().front());
+  boost::geometry::correct(object_polygon);
+
+  return object_polygon;
+}
+
+Polygon2d convertObjToPolygon(const PredictedObject & obj)
+{
+  Polygon2d object_polygon{};
+  if (obj.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) {
+    object_polygon = utils::convertCylindricalObjectToGeometryPolygon(
+      obj.kinematics.initial_pose_with_covariance.pose, obj.shape);
+  } else if (obj.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) {
+    const double & length_m = obj.shape.dimensions.x / 2;
+    const double & width_m = obj.shape.dimensions.y / 2;
+    object_polygon = utils::convertBoundingBoxObjectToGeometryPolygon(
+      obj.kinematics.initial_pose_with_covariance.pose, length_m, length_m, width_m);
+  } else if (obj.shape.type == autoware_auto_perception_msgs::msg::Shape::POLYGON) {
+    object_polygon = utils::convertPolygonObjectToGeometryPolygon(
+      obj.kinematics.initial_pose_with_covariance.pose, obj.shape);
+  } else {
+    throw std::runtime_error("Unsupported shape type");
+  }
+  return object_polygon;
+}
+}  // namespace autoware::motion::control::autonomous_emergency_braking::utils
diff --git a/launch/tier4_control_launch/launch/control.launch.py b/launch/tier4_control_launch/launch/control.launch.py
index 6e3340dcd5703..c86f993ac2a40 100644
--- a/launch/tier4_control_launch/launch/control.launch.py
+++ b/launch/tier4_control_launch/launch/control.launch.py
@@ -156,7 +156,7 @@ def launch_setup(context, *args, **kwargs):
 
     # autonomous emergency braking
     autonomous_emergency_braking = ComposableNode(
-        package="autonomous_emergency_braking",
+        package="autoware_autonomous_emergency_braking",
         plugin="autoware::motion::control::autonomous_emergency_braking::AEB",
         name="autonomous_emergency_braking",
         remappings=[