diff --git a/common/tier4_automatic_goal_rviz_plugin/CMakeLists.txt b/common/tier4_automatic_goal_rviz_plugin/CMakeLists.txt
index c6d4e30061626..cdc57743a6cb1 100644
--- a/common/tier4_automatic_goal_rviz_plugin/CMakeLists.txt
+++ b/common/tier4_automatic_goal_rviz_plugin/CMakeLists.txt
@@ -11,6 +11,7 @@ set(CMAKE_INCLUDE_CURRENT_DIR ON)
add_definitions(-DQT_NO_KEYWORDS)
ament_auto_add_library(${PROJECT_NAME} SHARED
+ src/automatic_checkpoint_append_tool.cpp
src/automatic_goal_sender.cpp
src/automatic_goal_append_tool.cpp
src/automatic_goal_panel.cpp
diff --git a/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png
new file mode 100644
index 0000000000000..6a67573717ae1
Binary files /dev/null and b/common/tier4_automatic_goal_rviz_plugin/icons/classes/AutowareAutomaticCheckpointTool.png differ
diff --git a/common/tier4_automatic_goal_rviz_plugin/package.xml b/common/tier4_automatic_goal_rviz_plugin/package.xml
index deda078ace22e..fb5331379acb6 100644
--- a/common/tier4_automatic_goal_rviz_plugin/package.xml
+++ b/common/tier4_automatic_goal_rviz_plugin/package.xml
@@ -6,6 +6,8 @@
The autoware automatic goal rviz plugin package
Shumpei Wakabayashi
Dawid Moszyński
+ Kyoichi Sugahara
+ Satoshi Ota
Apache License 2.0
Dawid Moszyński
@@ -22,6 +24,7 @@
rviz_default_plugins
tf2
tf2_geometry_msgs
+ tier4_autoware_utils
visualization_msgs
yaml-cpp
diff --git a/common/tier4_automatic_goal_rviz_plugin/plugins/plugin_description.xml b/common/tier4_automatic_goal_rviz_plugin/plugins/plugin_description.xml
index e7d5224550868..e9d77491941ed 100644
--- a/common/tier4_automatic_goal_rviz_plugin/plugins/plugin_description.xml
+++ b/common/tier4_automatic_goal_rviz_plugin/plugins/plugin_description.xml
@@ -9,4 +9,9 @@
base_class_type="rviz_common::Tool">
AutowareAutomaticGoalTool
+
+ AutowareAutomaticCheckpointTool
+
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.cpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.cpp
new file mode 100644
index 0000000000000..4efa6fedbaabd
--- /dev/null
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.cpp
@@ -0,0 +1,122 @@
+// Copyright 2024 TIER IV, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+/*
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2012, Willow Garage, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Willow Garage, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "automatic_checkpoint_append_tool.hpp"
+
+#ifdef ROS_DISTRO_GALACTIC
+#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
+#else
+#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
+#endif
+#include <rviz_common/display_context.hpp>
+
+#include <string>
+
+namespace rviz_plugins
+{
+AutowareAutomaticCheckpointTool::AutowareAutomaticCheckpointTool()
+{
+ shortcut_key_ = 'c';
+
+ pose_topic_property_ = new rviz_common::properties::StringProperty(
+ "Pose Topic", "~/automatic_goal/checkpoint", "The topic on which to publish automatic_goal.",
+ getPropertyContainer(), SLOT(updateTopic()), this);
+ std_dev_x_ = new rviz_common::properties::FloatProperty(
+ "X std deviation", 0.5, "X standard deviation for checkpoint pose [m]", getPropertyContainer());
+ std_dev_y_ = new rviz_common::properties::FloatProperty(
+ "Y std deviation", 0.5, "Y standard deviation for checkpoint pose [m]", getPropertyContainer());
+ std_dev_theta_ = new rviz_common::properties::FloatProperty(
+ "Theta std deviation", M_PI / 12.0, "Theta standard deviation for checkpoint pose [rad]",
+ getPropertyContainer());
+ position_z_ = new rviz_common::properties::FloatProperty(
+ "Z position", 0.0, "Z position for checkpoint pose [m]", getPropertyContainer());
+ std_dev_x_->setMin(0);
+ std_dev_y_->setMin(0);
+ std_dev_theta_->setMin(0);
+ position_z_->setMin(0);
+}
+
+void AutowareAutomaticCheckpointTool::onInitialize()
+{
+ PoseTool::onInitialize();
+ setName("2D AppendCheckpoint");
+ updateTopic();
+}
+
+void AutowareAutomaticCheckpointTool::updateTopic()
+{
+ rclcpp::Node::SharedPtr raw_node = context_->getRosNodeAbstraction().lock()->get_raw_node();
+ pose_pub_ = raw_node->create_publisher<geometry_msgs::msg::PoseStamped>(
+ pose_topic_property_->getStdString(), 1);
+ clock_ = raw_node->get_clock();
+}
+
+void AutowareAutomaticCheckpointTool::onPoseSet(double x, double y, double theta)
+{
+ // pose
+ std::string fixed_frame = context_->getFixedFrame().toStdString();
+ geometry_msgs::msg::PoseStamped pose;
+ pose.header.frame_id = fixed_frame;
+ pose.header.stamp = clock_->now();
+ pose.pose.position.x = x;
+ pose.pose.position.y = y;
+ pose.pose.position.z = position_z_->getFloat();
+
+ tf2::Quaternion quat;
+ quat.setRPY(0.0, 0.0, theta);
+ pose.pose.orientation = tf2::toMsg(quat);
+ RCLCPP_INFO(
+ rclcpp::get_logger("AutowareAutomaticCheckpointTool"),
+ "Setting pose: %.3f %.3f %.3f %.3f [frame=%s]", x, y, position_z_->getFloat(), theta,
+ fixed_frame.c_str());
+ pose_pub_->publish(pose);
+}
+
+} // end namespace rviz_plugins
+
+#include <pluginlib/class_list_macros.hpp>
+PLUGINLIB_EXPORT_CLASS(rviz_plugins::AutowareAutomaticCheckpointTool, rviz_common::Tool)
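For reference, the tool above only publishes a `geometry_msgs::msg::PoseStamped` on the topic configured by `pose_topic_property_` (default `~/automatic_goal/checkpoint`); the panel changes further down in this patch are the intended consumer. A minimal sketch of such a consumer is below — a hypothetical standalone listener, not part of this PR; the node name and the fully resolved topic name are placeholders.

```cpp
// Hypothetical standalone listener for the checkpoint topic published by the tool above.
// Topic name and queue depth mirror the defaults used in this PR; adjust to your setup.
#include <geometry_msgs/msg/pose_stamped.hpp>
#include <rclcpp/rclcpp.hpp>

class CheckpointListener : public rclcpp::Node
{
public:
  CheckpointListener() : Node("checkpoint_listener")
  {
    sub_ = create_subscription<geometry_msgs::msg::PoseStamped>(
      "/rviz/automatic_goal/checkpoint", 5,
      [this](const geometry_msgs::msg::PoseStamped::ConstSharedPtr msg) {
        // Log the received checkpoint pose and its frame.
        RCLCPP_INFO(
          get_logger(), "checkpoint: (%.2f, %.2f) in %s", msg->pose.position.x,
          msg->pose.position.y, msg->header.frame_id.c_str());
      });
  }

private:
  rclcpp::Subscription<geometry_msgs::msg::PoseStamped>::SharedPtr sub_;
};

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  rclcpp::spin(std::make_shared<CheckpointListener>());
  rclcpp::shutdown();
  return 0;
}
```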
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.hpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.hpp
new file mode 100644
index 0000000000000..4ea3fa4bd009a
--- /dev/null
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_checkpoint_append_tool.hpp
@@ -0,0 +1,91 @@
+// Copyright 2024 TIER IV, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+/*
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2012, Willow Garage, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Willow Garage, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AUTOMATIC_CHECKPOINT_APPEND_TOOL_HPP_
+#define AUTOMATIC_CHECKPOINT_APPEND_TOOL_HPP_
+
+#ifndef Q_MOC_RUN // See: https://bugreports.qt-project.org/browse/QTBUG-22829
+#include <QObject>
+#include <rclcpp/rclcpp.hpp>
+#include <rviz_common/display_context.hpp>
+#include <rviz_common/properties/float_property.hpp>
+#include <rviz_common/properties/string_property.hpp>
+#include <rviz_default_plugins/tools/pose/pose_tool.hpp>
+#endif
+
+#include <geometry_msgs/msg/pose_stamped.hpp>
+
+namespace rviz_plugins
+{
+class AutowareAutomaticCheckpointTool : public rviz_default_plugins::tools::PoseTool
+{
+ Q_OBJECT
+
+public:
+ AutowareAutomaticCheckpointTool();
+ void onInitialize() override;
+
+protected:
+ void onPoseSet(double x, double y, double theta) override;
+
+private Q_SLOTS:
+ void updateTopic();
+
+private: // NOLINT for Qt
+ rclcpp::Clock::SharedPtr clock_{nullptr};
+ rclcpp::Publisher<geometry_msgs::msg::PoseStamped>::SharedPtr pose_pub_{nullptr};
+
+ rviz_common::properties::StringProperty * pose_topic_property_{nullptr};
+ rviz_common::properties::FloatProperty * std_dev_x_{nullptr};
+ rviz_common::properties::FloatProperty * std_dev_y_{nullptr};
+ rviz_common::properties::FloatProperty * std_dev_theta_{nullptr};
+ rviz_common::properties::FloatProperty * position_z_{nullptr};
+};
+
+} // namespace rviz_plugins
+
+#endif // AUTOMATIC_CHECKPOINT_APPEND_TOOL_HPP_
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.cpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.cpp
index bee390bfe29ed..6b8d7765a989a 100644
--- a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.cpp
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.cpp
@@ -16,6 +16,8 @@
#include "automatic_goal_panel.hpp"
+#include <tier4_autoware_utils/ros/marker_helper.hpp>
+
namespace rviz_plugins
{
AutowareAutomaticGoalPanel::AutowareAutomaticGoalPanel(QWidget * parent)
@@ -139,6 +141,9 @@ void AutowareAutomaticGoalPanel::onInitialize()
sub_append_goal_ = raw_node_->create_subscription<PoseStamped>(
"~/automatic_goal/goal", 5,
std::bind(&AutowareAutomaticGoalPanel::onAppendGoal, this, std::placeholders::_1));
+ sub_append_checkpoint_ = raw_node_->create_subscription<PoseStamped>(
+ "~/automatic_goal/checkpoint", 5,
+ std::bind(&AutowareAutomaticGoalPanel::onAppendCheckpoint, this, std::placeholders::_1));
initCommunication(raw_node_.get());
}
@@ -244,12 +249,28 @@ void AutowareAutomaticGoalPanel::onClickSaveListToFile()
void AutowareAutomaticGoalPanel::onAppendGoal(const PoseStamped::ConstSharedPtr pose)
{
if (state_ == State::EDITING) {
- goals_list_.push_back(pose);
+ goals_list_.emplace_back(pose);
updateGoalsList();
updateGUI();
}
}
+void AutowareAutomaticGoalPanel::onAppendCheckpoint(const PoseStamped::ConstSharedPtr pose)
+{
+ if (goals_list_widget_ptr_->selectedItems().count() != 1) {
+ showMessageBox("Select a goal in GoalsList before set checkpoint");
+ return;
+ }
+
+ current_goal_ = goals_list_widget_ptr_->currentRow();
+ if (current_goal_ >= goals_list_.size()) {
+ return;
+ }
+
+ goals_list_.at(current_goal_).checkpoint_pose_ptrs.push_back(pose);
+ publishMarkers();
+}
+
// Override
void AutowareAutomaticGoalPanel::onCallResult()
{
@@ -418,47 +439,71 @@ void AutowareAutomaticGoalPanel::updateGoalIcon(const unsigned goal_index, const
void AutowareAutomaticGoalPanel::publishMarkers()
{
+ using tier4_autoware_utils::createDefaultMarker;
+ using tier4_autoware_utils::createMarkerColor;
+ using tier4_autoware_utils::createMarkerScale;
+
MarkerArray text_array;
MarkerArray arrow_array;
// Clear existing
- visualization_msgs::msg::Marker marker;
- marker.header.stamp = rclcpp::Time();
- marker.ns = "names";
- marker.id = 0;
- marker.action = visualization_msgs::msg::Marker::DELETEALL;
- text_array.markers.push_back(marker);
- marker.ns = "poses";
- arrow_array.markers.push_back(marker);
- pub_marker_->publish(text_array);
- pub_marker_->publish(arrow_array);
+ {
+ auto marker = createDefaultMarker(
+ "map", rclcpp::Clock{RCL_ROS_TIME}.now(), "names", 0L, Marker::CUBE,
+ createMarkerScale(1.0, 1.0, 1.0), createMarkerColor(1.0, 1.0, 1.0, 0.999));
+ marker.action = visualization_msgs::msg::Marker::DELETEALL;
+ text_array.markers.push_back(marker);
+ pub_marker_->publish(text_array);
+ }
+
+ {
+ auto marker = createDefaultMarker(
+ "map", rclcpp::Clock{RCL_ROS_TIME}.now(), "poses", 0L, Marker::CUBE,
+ createMarkerScale(1.0, 1.0, 1.0), createMarkerColor(1.0, 1.0, 1.0, 0.999));
+ arrow_array.markers.push_back(marker);
+ pub_marker_->publish(arrow_array);
+ }
+
text_array.markers.clear();
arrow_array.markers.clear();
- // Publish current
- for (unsigned i = 0; i < goals_list_.size(); i++) {
- visualization_msgs::msg::Marker marker;
- marker.header.frame_id = "map";
- marker.header.stamp = rclcpp::Time();
- marker.ns = "names";
- marker.id = static_cast(i);
- marker.type = visualization_msgs::msg::Marker::TEXT_VIEW_FACING;
+
+ const auto push_arrow_marker = [&](const auto & pose, const auto & color, const size_t id) {
+ auto marker = createDefaultMarker(
+ "map", rclcpp::Clock{RCL_ROS_TIME}.now(), "poses", id, Marker::ARROW,
+ createMarkerScale(1.6, 0.5, 0.5), color);
marker.action = visualization_msgs::msg::Marker::ADD;
- marker.pose = goals_list_[i]->pose;
+ marker.pose = pose;
marker.lifetime = rclcpp::Duration(0, 0);
- marker.scale.x = 1.6;
- marker.scale.y = 1.6;
- marker.scale.z = 1.6;
- marker.color.r = 1.0;
- marker.color.g = 1.0;
- marker.color.b = 1.0;
- marker.color.a = 1.0;
marker.frame_locked = false;
- marker.text = "G" + std::to_string(i);
- text_array.markers.push_back(marker);
- marker.ns = "poses";
- marker.scale.y = 0.2;
- marker.scale.z = 0.2;
- marker.type = visualization_msgs::msg::Marker::ARROW;
arrow_array.markers.push_back(marker);
+ };
+
+ const auto push_text_marker = [&](const auto & pose, const auto & text, const size_t id) {
+ auto marker = createDefaultMarker(
+ "map", rclcpp::Clock{RCL_ROS_TIME}.now(), "names", id, Marker::TEXT_VIEW_FACING,
+ createMarkerScale(1.6, 1.6, 1.6), createMarkerColor(1.0, 1.0, 1.0, 0.999));
+ marker.action = visualization_msgs::msg::Marker::ADD;
+ marker.pose = pose;
+ marker.lifetime = rclcpp::Duration(0, 0);
+ marker.frame_locked = false;
+ marker.text = text;
+ text_array.markers.push_back(marker);
+ };
+
+ // Publish current
+ size_t id = 0;
+ for (size_t i = 0; i < goals_list_.size(); ++i) {
+ {
+ const auto pose = goals_list_.at(i).goal_pose_ptr->pose;
+ push_arrow_marker(pose, createMarkerColor(0.0, 1.0, 0.0, 0.999), id++);
+ push_text_marker(pose, "Goal:" + std::to_string(i), id++);
+ }
+
+ for (size_t j = 0; j < goals_list_.at(i).checkpoint_pose_ptrs.size(); ++j) {
+ const auto pose = goals_list_.at(i).checkpoint_pose_ptrs.at(j)->pose;
+ push_arrow_marker(pose, createMarkerColor(1.0, 1.0, 0.0, 0.999), id++);
+ push_text_marker(
+ pose, "Checkpoint:" + std::to_string(i) + "[Goal:" + std::to_string(j) + "]", id++);
+ }
}
pub_marker_->publish(text_array);
pub_marker_->publish(arrow_array);
@@ -469,13 +514,13 @@ void AutowareAutomaticGoalPanel::saveGoalsList(const std::string & file_path)
{
YAML::Node node;
for (unsigned i = 0; i < goals_list_.size(); ++i) {
- node[i]["position_x"] = goals_list_[i]->pose.position.x;
- node[i]["position_y"] = goals_list_[i]->pose.position.y;
- node[i]["position_z"] = goals_list_[i]->pose.position.z;
- node[i]["orientation_x"] = goals_list_[i]->pose.orientation.x;
- node[i]["orientation_y"] = goals_list_[i]->pose.orientation.y;
- node[i]["orientation_z"] = goals_list_[i]->pose.orientation.z;
- node[i]["orientation_w"] = goals_list_[i]->pose.orientation.w;
+ node[i]["position_x"] = goals_list_[i].goal_pose_ptr->pose.position.x;
+ node[i]["position_y"] = goals_list_[i].goal_pose_ptr->pose.position.y;
+ node[i]["position_z"] = goals_list_[i].goal_pose_ptr->pose.position.z;
+ node[i]["orientation_x"] = goals_list_[i].goal_pose_ptr->pose.orientation.x;
+ node[i]["orientation_y"] = goals_list_[i].goal_pose_ptr->pose.orientation.y;
+ node[i]["orientation_z"] = goals_list_[i].goal_pose_ptr->pose.orientation.z;
+ node[i]["orientation_w"] = goals_list_[i].goal_pose_ptr->pose.orientation.w;
}
std::ofstream file_out(file_path);
file_out << node;
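The `publishMarkers()` rewrite above replaces hand-filled `visualization_msgs::msg::Marker` fields with the `createDefaultMarker` / `createMarkerScale` / `createMarkerColor` helpers from `tier4_autoware_utils`. A condensed sketch of the same pattern follows; the helper signatures are inferred from their use in this hunk, and the header path `tier4_autoware_utils/ros/marker_helper.hpp` is an assumption.

```cpp
#include <geometry_msgs/msg/pose.hpp>
#include <rclcpp/rclcpp.hpp>
#include <tier4_autoware_utils/ros/marker_helper.hpp>  // assumed header for the helpers
#include <visualization_msgs/msg/marker_array.hpp>

#include <string>

// Builds one arrow and one text marker for a pose, following the helper-based
// pattern used in publishMarkers() above: (frame, stamp, ns, id, type, scale, color).
visualization_msgs::msg::MarkerArray make_goal_markers(
  const geometry_msgs::msg::Pose & pose, const std::string & label)
{
  using tier4_autoware_utils::createDefaultMarker;
  using tier4_autoware_utils::createMarkerColor;
  using tier4_autoware_utils::createMarkerScale;
  using visualization_msgs::msg::Marker;

  visualization_msgs::msg::MarkerArray array;
  const auto stamp = rclcpp::Clock{RCL_ROS_TIME}.now();

  // Green arrow marking the pose itself.
  auto arrow = createDefaultMarker(
    "map", stamp, "poses", 0, Marker::ARROW, createMarkerScale(1.6, 0.5, 0.5),
    createMarkerColor(0.0, 1.0, 0.0, 0.999));
  arrow.pose = pose;
  array.markers.push_back(arrow);

  // White text label next to the pose.
  auto text = createDefaultMarker(
    "map", stamp, "names", 1, Marker::TEXT_VIEW_FACING, createMarkerScale(1.6, 1.6, 1.6),
    createMarkerColor(1.0, 1.0, 1.0, 0.999));
  text.pose = pose;
  text.text = label;
  array.markers.push_back(text);

  return array;
}
```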
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.hpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.hpp
index 0ec9ca530f074..72a5bd4fb80fe 100644
--- a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.hpp
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_panel.hpp
@@ -61,7 +61,9 @@ class AutowareAutomaticGoalPanel : public rviz_common::Panel,
public automatic_goal::AutowareAutomaticGoalSender
{
using State = automatic_goal::AutomaticGoalState;
+ using Pose = geometry_msgs::msg::Pose;
using PoseStamped = geometry_msgs::msg::PoseStamped;
+ using Marker = visualization_msgs::msg::Marker;
using MarkerArray = visualization_msgs::msg::MarkerArray;
using OperationModeState = autoware_adapi_v1_msgs::msg::OperationModeState;
using ChangeOperationMode = autoware_adapi_v1_msgs::srv::ChangeOperationMode;
@@ -96,6 +98,7 @@ public Q_SLOTS: // NOLINT for Qt
// Inputs
void onAppendGoal(const PoseStamped::ConstSharedPtr pose);
+ void onAppendCheckpoint(const PoseStamped::ConstSharedPtr pose);
// Visual updates
void updateGUI();
@@ -116,6 +119,7 @@ public Q_SLOTS: // NOLINT for Qt
// Pub/Sub
rclcpp::Publisher<MarkerArray>::SharedPtr pub_marker_{nullptr};
rclcpp::Subscription<PoseStamped>::SharedPtr sub_append_goal_{nullptr};
+ rclcpp::Subscription<PoseStamped>::SharedPtr sub_append_checkpoint_{nullptr};
// Containers
rclcpp::Node::SharedPtr raw_node_{nullptr};
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.cpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.cpp
index cc8d46e2f60c6..3ca368a3bd1a4 100644
--- a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.cpp
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.cpp
@@ -108,9 +108,9 @@ void AutowareAutomaticGoalSender::updateGoalsList()
std::stringstream ss;
ss << std::fixed << std::setprecision(2);
tf2::Quaternion tf2_quat;
- tf2::convert(goal->pose.orientation, tf2_quat);
- ss << "G" << i << " (" << goal->pose.position.x << ", ";
- ss << goal->pose.position.y << ", " << tf2::getYaw(tf2_quat) << ")";
+ tf2::convert(goal.goal_pose_ptr->pose.orientation, tf2_quat);
+ ss << "G" << i << " (" << goal.goal_pose_ptr->pose.position.x << ", ";
+ ss << goal.goal_pose_ptr->pose.position.y << ", " << tf2::getYaw(tf2_quat) << ")";
goals_achieved_.insert({i++, std::make_pair(ss.str(), 0)});
}
onGoalListUpdated();
@@ -178,7 +178,7 @@ void AutowareAutomaticGoalSender::loadGoalsList(const std::string & file_path)
pose->pose.orientation.y = goal["orientation_y"].as<double>();
pose->pose.orientation.z = goal["orientation_z"].as<double>();
pose->pose.orientation.w = goal["orientation_w"].as<double>();
- goals_list_.push_back(pose);
+ goals_list_.emplace_back(pose);
}
resetAchievedGoals();
updateGoalsList();
diff --git a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.hpp b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.hpp
index aacdada352811..88b7b5e7dac20 100644
--- a/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.hpp
+++ b/common/tier4_automatic_goal_rviz_plugin/src/automatic_goal_sender.hpp
@@ -80,8 +80,11 @@ class AutowareAutomaticGoalSender : public rclcpp::Node
}
auto req = std::make_shared<typename T::Request>();
- req->header = goals_list_.at(goal_index)->header;
- req->goal = goals_list_.at(goal_index)->pose;
+ req->header = goals_list_.at(goal_index).goal_pose_ptr->header;
+ req->goal = goals_list_.at(goal_index).goal_pose_ptr->pose;
+ for (const auto & checkpoint : goals_list_.at(goal_index).checkpoint_pose_ptrs) {
+ req->waypoints.push_back(checkpoint->pose);
+ }
client->async_send_request(
req, [this](typename rclcpp::Client<T>::SharedFuture result) {
if (result.get()->status.code != 0) state_ = State::ERROR;
@@ -120,6 +123,13 @@ class AutowareAutomaticGoalSender : public rclcpp::Node
}
}
+ struct Route
+ {
+ explicit Route(const PoseStamped::ConstSharedPtr & goal) : goal_pose_ptr{goal} {}
+ PoseStamped::ConstSharedPtr goal_pose_ptr{};
+ std::vector<PoseStamped::ConstSharedPtr> checkpoint_pose_ptrs{};
+ };
+
// Update
void updateGoalsList();
virtual void updateAutoExecutionTimerTick();
@@ -155,7 +165,7 @@ class AutowareAutomaticGoalSender : public rclcpp::Node
// Containers
unsigned current_goal_{0};
State state_{State::INITIALIZING};
- std::vector<PoseStamped::ConstSharedPtr> goals_list_{};
+ std::vector<Route> goals_list_{};
std::map> goals_achieved_{};
std::string goals_achieved_file_path_{};
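The new `Route` struct above replaces the bare `PoseStamped::ConstSharedPtr` entries of `goals_list_`: each entry now carries one goal pose plus an ordered list of checkpoint poses, and `callPlanToGoalIndex` copies those checkpoints into the request's `waypoints`. The following is a self-contained sketch of that usage pattern, with a plain `main` instead of the ROS service call; only the container handling mirrors the code above.

```cpp
#include <geometry_msgs/msg/pose.hpp>
#include <geometry_msgs/msg/pose_stamped.hpp>

#include <memory>
#include <vector>

// Mirrors the Route struct added above: one goal pose plus any number of checkpoints.
struct Route
{
  using PoseStamped = geometry_msgs::msg::PoseStamped;
  explicit Route(const PoseStamped::ConstSharedPtr & goal) : goal_pose_ptr{goal} {}
  PoseStamped::ConstSharedPtr goal_pose_ptr{};
  std::vector<PoseStamped::ConstSharedPtr> checkpoint_pose_ptrs{};
};

int main()
{
  using geometry_msgs::msg::PoseStamped;

  auto goal = std::make_shared<PoseStamped>();
  goal->pose.position.x = 10.0;

  std::vector<Route> goals_list;
  goals_list.emplace_back(goal);  // same call pattern as goals_list_.emplace_back(pose)

  auto checkpoint = std::make_shared<PoseStamped>();
  checkpoint->pose.position.x = 5.0;
  goals_list.front().checkpoint_pose_ptrs.push_back(checkpoint);

  // When a route is requested, the goal pose becomes req->goal and every
  // checkpoint pose is appended to req->waypoints in insertion order.
  std::vector<geometry_msgs::msg::Pose> waypoints;
  for (const auto & cp : goals_list.front().checkpoint_pose_ptrs) {
    waypoints.push_back(cp->pose);
  }
  return waypoints.size() == 1 ? 0 : 1;
}
```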
diff --git a/evaluator/planning_evaluator/src/metrics/trajectory_metrics.cpp b/evaluator/planning_evaluator/src/metrics/trajectory_metrics.cpp
index d0d8d61c4c231..53265872bffa8 100644
--- a/evaluator/planning_evaluator/src/metrics/trajectory_metrics.cpp
+++ b/evaluator/planning_evaluator/src/metrics/trajectory_metrics.cpp
@@ -125,7 +125,7 @@ Stat<double> calcTrajectoryLength(const Trajectory & traj)
Stat<double> calcTrajectoryDuration(const Trajectory & traj)
{
double duration = 0.0;
- for (size_t i = 0; i < traj.points.size() - 1; ++i) {
+ for (size_t i = 0; i + 1 < traj.points.size(); ++i) {
const double length = calcDistance2d(traj.points.at(i), traj.points.at(i + 1));
const double velocity = traj.points.at(i).longitudinal_velocity_mps;
if (velocity != 0) {
@@ -158,7 +158,7 @@ Stat<double> calcTrajectoryAcceleration(const Trajectory & traj)
Stat<double> calcTrajectoryJerk(const Trajectory & traj)
{
Stat<double> stat;
- for (size_t i = 0; i < traj.points.size() - 1; ++i) {
+ for (size_t i = 0; i + 1 < traj.points.size(); ++i) {
const double vel = traj.points.at(i).longitudinal_velocity_mps;
if (vel != 0) {
const double duration =
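The loop-bound change in this file (`i < traj.points.size() - 1` replaced by `i + 1 < traj.points.size()`) matters because `size()` returns an unsigned `size_t`: for an empty trajectory, `size() - 1` wraps around to a huge value, so the old loop would run and index out of range instead of being skipped. A minimal illustration:

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
  std::vector<int> points;  // empty, like a trajectory with no points

  // Old bound: points.size() - 1 underflows to SIZE_MAX when size() == 0,
  // so a loop using it as the upper bound would not be skipped.
  std::cout << "size() - 1 = " << points.size() - 1 << '\n';

  // New bound: i + 1 < size() is already false at i == 0 for an empty (or
  // single-element) vector, so consecutive-pair loops terminate safely.
  for (std::size_t i = 0; i + 1 < points.size(); ++i) {
    // never entered for an empty or single-element vector
  }
  return 0;
}
```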
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml
deleted file mode 100644
index ba86bc1e334ff..0000000000000
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_radar_fusion_based_detection.launch.xml
+++ /dev/null
@@ -1,437 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml
index 885fa80ed7004..ab9ed65999048 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detection.launch.xml
@@ -3,7 +3,7 @@
-
+
@@ -34,9 +34,10 @@
-
+
-
+
+
@@ -57,23 +58,61 @@
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
+
@@ -94,47 +133,107 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
-
+
-
-
+
+
+
+
+
+
-
+
+
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
similarity index 57%
rename from launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml
rename to launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
index 4cb648852a03c..1c9201a9af8b3 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
@@ -1,17 +1,13 @@
-
-
-
-
+
-
-
-
-
+
+
+
@@ -54,6 +50,44 @@
-->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -192,190 +226,4 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
new file mode 100644
index 0000000000000..1a97659b357d8
--- /dev/null
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
@@ -0,0 +1,56 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml
new file mode 100644
index 0000000000000..ce34640bd3179
--- /dev/null
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_rule_detector.launch.xml
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/radar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/radar_detector.launch.xml
similarity index 100%
rename from launch/tier4_perception_launch/launch/object_recognition/detection/radar_based_detection.launch.xml
rename to launch/tier4_perception_launch/launch/object_recognition/detection/detector/radar_detector.launch.xml
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml
deleted file mode 100644
index adedc2312677f..0000000000000
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_radar_based_detection.launch.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_merger.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_merger.launch.xml
new file mode 100644
index 0000000000000..2f62e83ae0ef5
--- /dev/null
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_merger.launch.xml
@@ -0,0 +1,124 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_radar_merger.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_radar_merger.launch.xml
new file mode 100644
index 0000000000000..b4d19c9052a63
--- /dev/null
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/camera_lidar_radar_merger.launch.xml
@@ -0,0 +1,154 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml
similarity index 51%
rename from launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml
rename to launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml
index 04c4264b70e5f..6ccff44933966 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/merger/lidar_merger.launch.xml
@@ -1,113 +1,19 @@
-
-
+
-
-
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
+
+
+
@@ -133,9 +39,9 @@
-
-
-
+
+
+
diff --git a/localization/landmark_based_localizer/ar_tag_based_localizer/README.md b/localization/landmark_based_localizer/ar_tag_based_localizer/README.md
index f0c54d6393f7e..1a776bd810fff 100644
--- a/localization/landmark_based_localizer/ar_tag_based_localizer/README.md
+++ b/localization/landmark_based_localizer/ar_tag_based_localizer/README.md
@@ -2,7 +2,7 @@
**ArTagBasedLocalizer** is a vision-based localization node.
-
+
This node uses [the ArUco library](https://index.ros.org/p/aruco/) to detect AR-Tags from camera images and calculates and publishes the pose of the ego vehicle based on these detections.
The positions and orientations of the AR-Tags are assumed to be written in the Lanelet2 format.
@@ -30,6 +30,10 @@ The positions and orientations of the AR-Tags are assumed to be written in the L
| `/tf` | `geometry_msgs::msg::TransformStamped` | [debug topic] TF from camera to detected tag |
| `/diagnostics` | `diagnostic_msgs::msg::DiagnosticArray` | Diagnostics outputs |
+## Parameters
+
+{{ json_to_markdown("localization/landmark_based_localizer/ar_tag_based_localizer/schema/ar_tag_based_localizer.schema.json") }}
+
## How to launch
When launching Autoware, set `artag` for `pose_source`.
diff --git a/localization/landmark_based_localizer/ar_tag_based_localizer/schema/ar_tag_based_localizer.schema.json b/localization/landmark_based_localizer/ar_tag_based_localizer/schema/ar_tag_based_localizer.schema.json
new file mode 100644
index 0000000000000..bde5221aa1bbc
--- /dev/null
+++ b/localization/landmark_based_localizer/ar_tag_based_localizer/schema/ar_tag_based_localizer.schema.json
@@ -0,0 +1,87 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for ar_tag_based_localizer",
+ "type": "object",
+ "definitions": {
+ "ar_tag_based_localizer": {
+ "type": "object",
+ "properties": {
+ "marker_size": {
+ "type": "number",
+ "description": "marker_size",
+ "default": 0.6
+ },
+ "target_tag_ids": {
+ "type": "array",
+ "description": "target_tag_ids",
+ "default": "['0','1','2','3','4','5','6']"
+ },
+ "base_covariance": {
+ "type": "array",
+ "description": "base_covariance",
+ "default": [
+ 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.02, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02, 0.0, 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.02
+ ]
+ },
+ "distance_threshold": {
+ "type": "number",
+ "description": "distance_threshold(m)",
+ "default": "13.0"
+ },
+ "consider_orientation": {
+ "type": "boolean",
+ "description": "consider_orientation",
+ "default": "false"
+ },
+ "detection_mode": {
+ "type": "string",
+ "description": "detection_mode select from [DM_NORMAL, DM_FAST, DM_VIDEO_FAST]",
+ "default": "DM_NORMAL"
+ },
+ "min_marker_size": {
+ "type": "number",
+ "description": "min_marker_size",
+ "default": 0.02
+ },
+ "ekf_time_tolerance": {
+ "type": "number",
+ "description": "ekf_time_tolerance(sec)",
+ "default": 5.0
+ },
+ "ekf_position_tolerance": {
+ "type": "number",
+ "description": "ekf_position_tolerance(m)",
+ "default": 10.0
+ }
+ },
+ "required": [
+ "marker_size",
+ "target_tag_ids",
+ "base_covariance",
+ "distance_threshold",
+ "consider_orientation",
+ "detection_mode",
+ "min_marker_size",
+ "ekf_time_tolerance",
+ "ekf_position_tolerance"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/ar_tag_based_localizer"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/localization_error_monitor/README.md b/localization/localization_error_monitor/README.md
index 2ddf41e4eac70..7912ad843ef19 100644
--- a/localization/localization_error_monitor/README.md
+++ b/localization/localization_error_monitor/README.md
@@ -29,10 +29,4 @@ The package monitors the following two values:
## Parameters
-| Name | Type | Description |
-| -------------------------------------- | ------ | -------------------------------------------------------------------------------------------- |
-| `scale` | double | scale factor for monitored values (default: 3.0) |
-| `error_ellipse_size` | double | error threshold for long radius of confidence ellipse [m] (default: 1.0) |
-| `warn_ellipse_size` | double | warning threshold for long radius of confidence ellipse [m] (default: 0.8) |
-| `error_ellipse_size_lateral_direction` | double | error threshold for size of confidence ellipse along lateral direction [m] (default: 0.3) |
-| `warn_ellipse_size_lateral_direction` | double | warning threshold for size of confidence ellipse along lateral direction [m] (default: 0.25) |
+{{ json_to_markdown("localization/localization_error_monitor/schema/localization_error_monitor.schema.json") }}
diff --git a/localization/localization_error_monitor/schema/localization_error_monitor.schema.json b/localization/localization_error_monitor/schema/localization_error_monitor.schema.json
new file mode 100644
index 0000000000000..0cdd5fb946756
--- /dev/null
+++ b/localization/localization_error_monitor/schema/localization_error_monitor.schema.json
@@ -0,0 +1,56 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for Localization Error Monitor node",
+ "type": "object",
+ "definitions": {
+ "localization_error_monitor": {
+ "type": "object",
+ "properties": {
+ "scale": {
+ "type": "number",
+ "default": 3.0,
+ "description": "scale factor for monitored values"
+ },
+ "error_ellipse_size": {
+ "type": "number",
+ "default": 1.5,
+ "description": "error threshold for long radius of confidence ellipse [m]"
+ },
+ "warn_ellipse_size": {
+ "type": "number",
+ "default": 1.2,
+ "description": "warning threshold for long radius of confidence ellipse [m]"
+ },
+ "error_ellipse_size_lateral_direction": {
+ "type": "number",
+ "default": 0.3,
+ "description": "error threshold for size of confidence ellipse along lateral direction [m]"
+ },
+ "warn_ellipse_size_lateral_direction": {
+ "type": "number",
+ "default": 0.25,
+ "description": "warning threshold for size of confidence ellipse along lateral direction [m]"
+ }
+ },
+ "required": [
+ "scale",
+ "error_ellipse_size",
+ "warn_ellipse_size",
+ "error_ellipse_size_lateral_direction",
+ "warn_ellipse_size_lateral_direction"
+ ]
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/localization_error_monitor"
+ }
+ },
+ "required": ["ros__parameters"]
+ }
+ },
+ "required": ["/**"]
+}
diff --git a/localization/ndt_scan_matcher/config/ndt_scan_matcher.param.yaml b/localization/ndt_scan_matcher/config/ndt_scan_matcher.param.yaml
index 9bc62d3f919c6..a5a4941bfec62 100644
--- a/localization/ndt_scan_matcher/config/ndt_scan_matcher.param.yaml
+++ b/localization/ndt_scan_matcher/config/ndt_scan_matcher.param.yaml
@@ -54,6 +54,9 @@
# Tolerance of distance difference between two initial poses used for linear interpolation. [m]
initial_pose_distance_tolerance_m: 10.0
+ # The execution time above which NDT probably cannot match scans properly. [ms]
+ critical_upper_bound_exe_time_ms: 100.0
+
# Number of threads used for parallel computing
num_threads: 4
@@ -98,6 +101,3 @@
# If lidar_point.z - base_link.z <= this threshold , the point will be removed
z_margin_for_ground_removal: 0.8
-
- # The execution time which means probably NDT cannot matches scans properly. [ms]
- critical_upper_bound_exe_time_ms: 100
diff --git a/localization/ndt_scan_matcher/include/ndt_scan_matcher/ndt_scan_matcher_core.hpp b/localization/ndt_scan_matcher/include/ndt_scan_matcher/ndt_scan_matcher_core.hpp
index 71895c59ec37d..e2503c20aef6e 100644
--- a/localization/ndt_scan_matcher/include/ndt_scan_matcher/ndt_scan_matcher_core.hpp
+++ b/localization/ndt_scan_matcher/include/ndt_scan_matcher/ndt_scan_matcher_core.hpp
@@ -225,7 +225,7 @@ class NDTScanMatcher : public rclcpp::Node
double z_margin_for_ground_removal_;
// The execution time above which NDT probably cannot match scans properly
- int64_t critical_upper_bound_exe_time_ms_;
+ double critical_upper_bound_exe_time_ms_;
};
#endif // NDT_SCAN_MATCHER__NDT_SCAN_MATCHER_CORE_HPP_
diff --git a/localization/ndt_scan_matcher/src/ndt_scan_matcher_core.cpp b/localization/ndt_scan_matcher/src/ndt_scan_matcher_core.cpp
index 942df03410165..0382d805b7276 100644
--- a/localization/ndt_scan_matcher/src/ndt_scan_matcher_core.cpp
+++ b/localization/ndt_scan_matcher/src/ndt_scan_matcher_core.cpp
@@ -118,7 +118,7 @@ NDTScanMatcher::NDTScanMatcher()
lidar_topic_timeout_sec_ = this->declare_parameter<double>("lidar_topic_timeout_sec");
critical_upper_bound_exe_time_ms_ =
- this->declare_parameter<int64_t>("critical_upper_bound_exe_time_ms");
+ this->declare_parameter<double>("critical_upper_bound_exe_time_ms");
initial_pose_timeout_sec_ = this->declare_parameter<double>("initial_pose_timeout_sec");
@@ -316,8 +316,7 @@ void NDTScanMatcher::publish_diagnostic()
}
if (
state_ptr_->count("execution_time") &&
- std::stod((*state_ptr_)["execution_time"]) >=
- static_cast<double>(critical_upper_bound_exe_time_ms_)) {
+ std::stod((*state_ptr_)["execution_time"]) >= critical_upper_bound_exe_time_ms_) {
diag_status_msg.level = diagnostic_msgs::msg::DiagnosticStatus::WARN;
diag_status_msg.message +=
"NDT exe time is too long. (took " + (*state_ptr_)["execution_time"] + " [ms])";
diff --git a/localization/yabloc/yabloc_common/README.md b/localization/yabloc/yabloc_common/README.md
index 1160f1b314b99..9ed871c6ac401 100644
--- a/localization/yabloc/yabloc_common/README.md
+++ b/localization/yabloc/yabloc_common/README.md
@@ -63,8 +63,4 @@ This node extracts the elements related to the road surface markings and yabloc
### Parameters
-| Name | Type | Description |
-| --------------------- | ---------------- | ---------------------------------------------------------------------- |
-| `road_marking_labels` | vector\ | This label is used to extract the road surface markings from lanelet2. |
-| `sign_board_labels` | vector\ | This label is used to extract the traffic sign boards from lanelet2. |
-| `bounding_box_labels` | vector\ | This label is used to extract the bounding boxes from lanelet2. |
+{{ json_to_markdown("localization/yabloc/yabloc_common/schema/ll2_decomposer.schema.json") }}
diff --git a/localization/yabloc/yabloc_common/schema/ground_server.schema.json b/localization/yabloc/yabloc_common/schema/ground_server.schema.json
new file mode 100644
index 0000000000000..a9b0674cedd1c
--- /dev/null
+++ b/localization/yabloc/yabloc_common/schema/ground_server.schema.json
@@ -0,0 +1,43 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for ground_server",
+ "type": "object",
+ "definitions": {
+ "ground_server": {
+ "type": "object",
+ "properties": {
+ "force_zero_tilt": {
+ "type": "boolean",
+ "description": "if true, the tilt is always determined to be horizontal",
+ "default": false
+ },
+ "K": {
+ "type": "number",
+ "description": "the number of neighbors for ground search on a map",
+ "default": 50
+ },
+ "R": {
+ "type": "number",
+ "description": "radius for ground search on a map [m]",
+ "default": 10
+ }
+ },
+ "required": ["force_zero_tilt", "K", "R"],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/ground_server"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_common/schema/ll2_decomposer.schema.json b/localization/yabloc/yabloc_common/schema/ll2_decomposer.schema.json
new file mode 100644
index 0000000000000..fa5a7b37464f3
--- /dev/null
+++ b/localization/yabloc/yabloc_common/schema/ll2_decomposer.schema.json
@@ -0,0 +1,51 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for ll2_decomposer",
+ "type": "object",
+ "definitions": {
+ "ll2_decomposer": {
+ "type": "object",
+ "properties": {
+ "road_marking_labels": {
+ "type": "array",
+ "description": "line string types that indicating road surface markings in lanelet2",
+ "default": [
+ "cross_walk",
+ "zebra_marking",
+ "line_thin",
+ "line_thick",
+ "pedestrian_marking",
+ "stop_line",
+ "road_border"
+ ]
+ },
+ "sign_board_labels": {
+ "type": "array",
+ "description": "line string types that indicating traffic sign boards in lanelet2",
+ "default": ["sign-board"]
+ },
+ "bounding_box_labels": {
+ "type": "array",
+ "description": "line string types that indicating not mapped areas in lanelet2",
+ "default": ["none"]
+ }
+ },
+ "required": ["road_marking_labels", "sign_board_labels", "bounding_box_labels"],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/ll2_decomposer"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_image_processing/README.md b/localization/yabloc/yabloc_image_processing/README.md
index 85e1d408a1b4b..921a9277a727c 100644
--- a/localization/yabloc/yabloc_image_processing/README.md
+++ b/localization/yabloc/yabloc_image_processing/README.md
@@ -89,14 +89,7 @@ This is a node that integrates the results of graph_segment and lsd to extract r
### Parameters
-| Name | Type | Description |
-| -------------------------------------- | ------ | ------------------------------------------------------------------- |
-| `min_segment_length` | double | min length threshold (if it is negative, it is unlimited) |
-| `max_segment_distance` | double | max distance threshold (if it is negative, it is unlimited) |
-| `max_lateral_distance` | double | max lateral distance threshold (if it is negative, it is unlimited) |
-| `publish_image_with_segment_for_debug` | bool | toggle whether to publish the filtered line segment for debug |
-| `max_range` | double | range of debug projection visualization |
-| `image_size` | int | image size of debug projection visualization |
+{{ json_to_markdown("localization/yabloc/yabloc_common/schema/segment_filter.schema.json") }}
## undistort
@@ -127,11 +120,7 @@ This is to avoid redundant decompression within Autoware.
### Parameters
-| Name | Type | Description |
-| ------------------- | ------ | ---------------------------------------------------------------------------------------------- |
-| `use_sensor_qos` | bool | where to use sensor qos or not |
-| `width` | int | resized image width size |
-| `override_frame_id` | string | value for overriding the camera's frame_id. if blank, frame_id of static_tf is not overwritten |
+{{ json_to_markdown("localization/yabloc/yabloc_common/schema/undistort.schema.json") }}
#### about tf_static overriding
diff --git a/localization/yabloc/yabloc_image_processing/schema/graph_segment.schema.json b/localization/yabloc/yabloc_image_processing/schema/graph_segment.schema.json
new file mode 100644
index 0000000000000..c0bf96dd0de7a
--- /dev/null
+++ b/localization/yabloc/yabloc_image_processing/schema/graph_segment.schema.json
@@ -0,0 +1,71 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for graph_segment",
+ "type": "object",
+ "definitions": {
+ "graph_segment": {
+ "type": "object",
+ "properties": {
+ "target_height_ratio": {
+ "type": "number",
+ "description": "height on the image to retrieve the candidate road surface",
+ "default": 0.85
+ },
+ "target_candidate_box_width": {
+ "type": "number",
+ "description": "size of the square area to search for candidate road surfaces",
+ "default": 15
+ },
+ "pickup_additional_graph_segment": {
+ "type": "boolean",
+ "description": "if this is true, additional regions of similar color are retrieved",
+ "default": true
+ },
+ "similarity_score_threshold": {
+ "type": "number",
+ "description": "threshold for picking up additional areas",
+ "default": 0.8
+ },
+ "sigma": {
+ "type": "number",
+ "description": "parameter for cv::ximgproc::segmentation::GraphSegmentation",
+ "default": 0.5
+ },
+ "k": {
+ "type": "number",
+ "description": "parameter for cv::ximgproc::segmentation::GraphSegmentation",
+ "default": 300.0
+ },
+ "min_size": {
+ "type": "number",
+ "description": "parameter for cv::ximgproc::segmentation::GraphSegmentation",
+ "default": 100.0
+ }
+ },
+ "required": [
+ "target_height_ratio",
+ "target_candidate_box_width",
+ "pickup_additional_graph_segment",
+ "similarity_score_threshold",
+ "sigma",
+ "k",
+ "min_size"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/graph_segment"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_image_processing/schema/segment_filter.schema.json b/localization/yabloc/yabloc_image_processing/schema/segment_filter.schema.json
new file mode 100644
index 0000000000000..bfe74ea80569c
--- /dev/null
+++ b/localization/yabloc/yabloc_image_processing/schema/segment_filter.schema.json
@@ -0,0 +1,65 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for segment_filter",
+ "type": "object",
+ "definitions": {
+ "segment_filter": {
+ "type": "object",
+ "properties": {
+ "min_segment_length": {
+ "type": "number",
+ "description": "min length threshold (if it is negative, it is unlimited)",
+ "default": 1.5
+ },
+ "max_segment_distance": {
+ "type": "number",
+ "description": "max distance threshold (if it is negative, it is unlimited)",
+ "default": 30.0
+ },
+ "max_lateral_distance": {
+ "type": "number",
+ "description": "max lateral distance threshold (if it is negative, it is unlimited)",
+ "default": 10.0
+ },
+ "publish_image_with_segment_for_debug": {
+ "type": "boolean",
+ "description": "toggle whether to publish the filtered line segment for debug",
+ "default": true
+ },
+ "max_range": {
+ "type": "number",
+ "description": "range of debug projection visualization",
+ "default": 20.0
+ },
+ "image_size": {
+ "type": "number",
+ "description": "image size of debug projection visualization",
+ "default": 800
+ }
+ },
+ "required": [
+ "min_segment_length",
+ "max_segment_distance",
+ "max_lateral_distance",
+ "publish_image_with_segment_for_debug",
+ "max_range",
+ "image_size"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/segment_filter"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_image_processing/schema/undistort.schema.json b/localization/yabloc/yabloc_image_processing/schema/undistort.schema.json
new file mode 100644
index 0000000000000..d166d85024903
--- /dev/null
+++ b/localization/yabloc/yabloc_image_processing/schema/undistort.schema.json
@@ -0,0 +1,43 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for undistort",
+ "type": "object",
+ "definitions": {
+ "undistort": {
+ "type": "object",
+ "properties": {
+ "use_sensor_qos": {
+ "type": "boolean",
+ "description": "whether to use sensor qos or not",
+ "default": true
+ },
+ "width": {
+ "type": "number",
+ "description": "resized image width size",
+ "default": 800
+ },
+ "override_frame_id": {
+ "type": "string",
+ "description": "value for overriding the camera's frame_id. if blank, frame_id of static_tf is not overwritten",
+ "default": ""
+ }
+ },
+ "required": ["use_sensor_qos", "width", "override_frame_id"],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/undistort"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_monitor/README.md b/localization/yabloc/yabloc_monitor/README.md
index ed4cdc36b6ba0..849429290b427 100644
--- a/localization/yabloc/yabloc_monitor/README.md
+++ b/localization/yabloc/yabloc_monitor/README.md
@@ -25,3 +25,7 @@ To be added,
| Name | Type | Description |
| -------------- | --------------------------------- | ------------------- |
| `/diagnostics` | `diagnostic_msgs/DiagnosticArray` | Diagnostics outputs |
+
+### Parameters
+
+{{ json_to_markdown("localization/yabloc/yabloc_monitor/schema/yabloc_monitor.schema.json") }}
diff --git a/localization/yabloc/yabloc_monitor/schema/yabloc_monitor.schema.json b/localization/yabloc/yabloc_monitor/schema/yabloc_monitor.schema.json
new file mode 100644
index 0000000000000..c83a9d3ac45f3
--- /dev/null
+++ b/localization/yabloc/yabloc_monitor/schema/yabloc_monitor.schema.json
@@ -0,0 +1,33 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for yabloc_monitor",
+ "type": "object",
+ "definitions": {
+ "yabloc_monitor": {
+ "type": "object",
+ "properties": {
+ "availability/timestamp_tolerance": {
+ "type": "number",
+ "description": "tolerable time difference between current time and latest estimated pose",
+ "default": 1.0
+ }
+ },
+ "required": ["availability/timestamp_tolerance"],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/yabloc_monitor"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_particle_filter/README.md b/localization/yabloc/yabloc_particle_filter/README.md
index d42ce4647bc95..f46a363102c8b 100644
--- a/localization/yabloc/yabloc_particle_filter/README.md
+++ b/localization/yabloc/yabloc_particle_filter/README.md
@@ -36,15 +36,7 @@ This package contains some executable nodes related to particle filter.
### Parameters
-| Name | Type | Description |
-| ----------------------------- | ---------------- | ----------------------------------------------------------------- |
-| `visualize` | bool | whether particles are also published in visualization_msgs or not |
-| `static_linear_covariance` | double | to override the covariance of `/twist_with_covariance` |
-| `static_angular_covariance` | double | to override the covariance of `/twist_with_covariance` |
-| `resampling_interval_seconds` | double | the interval of particle resampling |
-| `num_of_particles` | int | the number of particles |
-| `prediction_rate` | double | frequency of forecast updates, in Hz |
-| `cov_xx_yy` | vector\ | the covariance of initial pose |
+{{ json_to_markdown("localization/yabloc/yabloc_particle_filter/schema/predictor.schema.json") }}
### Services
@@ -80,19 +72,7 @@ This package contains some executable nodes related to particle filter.
### Parameters
-| Name | Type | Description |
-| -------------------------------- | ------ | ---------------------------------------------------------------------------------------- |
-| `acceptable_max_delay` | double | how long to hold the predicted particles |
-| `visualize` | double | whether publish particles as marker_array or not |
-| `mahalanobis_distance_threshold` | double | if the Mahalanobis distance to the GNSS for particle exceeds this, the correction skips. |
-| `for_fixed/max_weight` | bool | parameter for gnss weight distribution |
-| `for_fixed/flat_radius` | bool | parameter for gnss weight distribution |
-| `for_fixed/max_radius` | bool | parameter for gnss weight distribution |
-| `for_fixed/min_weight` | bool | parameter for gnss weight distribution |
-| `for_not_fixed/flat_radius` | bool | parameter for gnss weight distribution |
-| `for_not_fixed/max_radius` | bool | parameter for gnss weight distribution |
-| `for_not_fixed/min_weight` | bool | parameter for gnss weight distribution |
-| `for_not_fixed/max_weight` | bool | parameter for gnss weight distribution |
+{{ json_to_markdown("localization/yabloc/yabloc_particle_filter/schema/gnss_particle_corrector.schema.json") }}
## camera_particle_corrector
@@ -127,16 +107,7 @@ This package contains some executable nodes related to particle filter.
### Parameters
-| Name | Type | Description |
-| ---------------------- | ------ | ----------------------------------------------------------------------------------------------------------------------------------------- |
-| `acceptable_max_delay` | double | how long to hold the predicted particles |
-| `visualize` | double | whether publish particles as marker_array or not |
-| `image_size` | int | image size of debug/cost_map_image |
-| `max_range` | double | width of hierarchical cost map |
-| `gamma` | double | gamma value of the intensity gradient of the cost map |
-| `min_prob` | double | minimum particle weight the corrector node gives |
-| `far_weight_gain` | double | `exp(-far_weight_gain_ * squared_distance_from_camera)` is weight gain. if this is large, the nearby road markings will be more important |
-| `enabled_at_first` | bool | if it is false, this node is not activated at first. you can activate by service call |
+{{ json_to_markdown("localization/yabloc/yabloc_particle_filter/schema/camera_particle_corrector.schema.json") }}
### Services
diff --git a/localization/yabloc/yabloc_particle_filter/schema/camera_particle_corrector.schema.json b/localization/yabloc/yabloc_particle_filter/schema/camera_particle_corrector.schema.json
new file mode 100644
index 0000000000000..a8b4873347f52
--- /dev/null
+++ b/localization/yabloc/yabloc_particle_filter/schema/camera_particle_corrector.schema.json
@@ -0,0 +1,77 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for camera_particle_corrector",
+ "type": "object",
+ "definitions": {
+ "camera_particle_corrector": {
+ "type": "object",
+ "properties": {
+ "acceptable_max_delay": {
+ "type": "number",
+ "description": "how long to hold the predicted particles",
+ "default": 1.0
+ },
+ "visualize": {
+ "type": "boolean",
+          "description": "whether to publish particles as marker_array or not",
+ "default": false
+ },
+ "image_size": {
+ "type": "number",
+ "description": "image size of debug/cost_map_image",
+ "default": 800
+ },
+ "max_range": {
+ "type": "number",
+ "description": "width of hierarchical cost map",
+ "default": 40.0
+ },
+ "gamma": {
+ "type": "number",
+ "description": "gamma value of the intensity gradient of the cost map",
+ "default": 5.0
+ },
+ "min_prob": {
+ "type": "number",
+ "description": "minimum particle weight the corrector node gives",
+ "default": 0.1
+ },
+ "far_weight_gain": {
+ "type": "number",
+          "description": "`exp(-far_weight_gain_ * squared_distance_from_camera)` is the weight gain. If this is large, the nearby road markings become more important",
+ "default": 0.001
+ },
+ "enabled_at_first": {
+ "type": "boolean",
+          "description": "if false, this node is not activated at first; it can be activated by a service call",
+ "default": true
+ }
+ },
+ "required": [
+ "acceptable_max_delay",
+ "visualize",
+ "image_size",
+ "max_range",
+ "gamma",
+ "min_prob",
+ "far_weight_gain",
+ "enabled_at_first"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/camera_particle_corrector"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_particle_filter/schema/gnss_particle_corrector.schema.json b/localization/yabloc/yabloc_particle_filter/schema/gnss_particle_corrector.schema.json
new file mode 100644
index 0000000000000..a3990ad333981
--- /dev/null
+++ b/localization/yabloc/yabloc_particle_filter/schema/gnss_particle_corrector.schema.json
@@ -0,0 +1,94 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for gnss_particle_corrector",
+ "type": "object",
+ "definitions": {
+ "gnss_particle_corrector": {
+ "type": "object",
+ "properties": {
+ "acceptable_max_delay": {
+ "type": "number",
+ "description": "how long to hold the predicted particles",
+ "default": 1.0
+ },
+ "visualize": {
+ "type": "boolean",
+          "description": "whether to publish particles as marker_array or not",
+ "default": false
+ },
+ "mahalanobis_distance_threshold": {
+ "type": "number",
+          "description": "if the Mahalanobis distance from a particle to the GNSS observation exceeds this value, the correction is skipped",
+ "default": 30.0
+ },
+ "for_fixed/max_weight": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is fixed",
+ "default": 5.0
+ },
+ "for_fixed/flat_radius": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is fixed",
+ "default": 0.5
+ },
+ "for_fixed/max_radius": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is fixed",
+ "default": 10.0
+ },
+ "for_fixed/min_weight": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is fixed",
+ "default": 0.5
+ },
+ "for_not_fixed/max_weight": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is not fixed",
+ "default": 1.0
+ },
+ "for_not_fixed/flat_radius": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is not fixed",
+ "default": 5.0
+ },
+ "for_not_fixed/max_radius": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is not fixed",
+ "default": 20.0
+ },
+ "for_not_fixed/min_weight": {
+ "type": "number",
+ "description": "gnss weight distribution used when observation is not fixed",
+ "default": 0.5
+ }
+ },
+ "required": [
+ "acceptable_max_delay",
+        "visualize",
+        "mahalanobis_distance_threshold",
+ "for_fixed/max_weight",
+ "for_fixed/flat_radius",
+ "for_fixed/max_radius",
+ "for_fixed/min_weight",
+ "for_not_fixed/max_weight",
+ "for_not_fixed/flat_radius",
+ "for_not_fixed/max_radius",
+ "for_not_fixed/min_weight"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/gnss_particle_corrector"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_particle_filter/schema/predictor.schema.json b/localization/yabloc/yabloc_particle_filter/schema/predictor.schema.json
new file mode 100644
index 0000000000000..487dffdb00094
--- /dev/null
+++ b/localization/yabloc/yabloc_particle_filter/schema/predictor.schema.json
@@ -0,0 +1,71 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for predictor",
+ "type": "object",
+ "definitions": {
+ "predictor": {
+ "type": "object",
+ "properties": {
+ "visualize": {
+ "type": "boolean",
+ "description": "whether particles are also published in visualization_msgs or not",
+ "default": true
+ },
+ "static_linear_covariance": {
+ "type": "number",
+ "description": "overriding covariance of `/twist_with_covariance`",
+ "default": 0.04
+ },
+ "static_angular_covariance": {
+ "type": "number",
+ "description": "overriding covariance of `/twist_with_covariance`",
+ "default": 0.006
+ },
+ "resampling_interval_seconds": {
+ "type": "number",
+ "description": "the interval of particle resampling",
+ "default": 1.0
+ },
+ "num_of_particles": {
+ "type": "number",
+ "description": "the number of particles",
+ "default": 500
+ },
+ "prediction_rate": {
+ "type": "number",
+ "description": "frequency of forecast updates, in Hz",
+ "default": 50.0
+ },
+ "cov_xx_yy": {
+ "type": "array",
+ "description": "the covariance of initial pose",
+ "default": [2.0, 0.25]
+ }
+ },
+ "required": [
+ "visualize",
+ "static_linear_covariance",
+ "static_angular_covariance",
+ "resampling_interval_seconds",
+ "num_of_particles",
+ "prediction_rate",
+ "cov_xx_yy"
+ ],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/predictor"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/localization/yabloc/yabloc_pose_initializer/README.md b/localization/yabloc/yabloc_pose_initializer/README.md
index 907b79c1459ec..33b9788cca3d3 100644
--- a/localization/yabloc/yabloc_pose_initializer/README.md
+++ b/localization/yabloc/yabloc_pose_initializer/README.md
@@ -59,9 +59,7 @@ Converted model URL
### Parameters
-| Name | Type | Description |
-| ------------------ | ---- | ----------------------------------------- |
-| `angle_resolution` | int | how many divisions of 1 sigma angle range |
+{{ json_to_markdown("localization/yabloc/yabloc_pose_initializer/schema/camera_pose_initializer.schema.json") }}
### Services
diff --git a/localization/yabloc/yabloc_pose_initializer/schema/camera_pose_initializer.schema.json b/localization/yabloc/yabloc_pose_initializer/schema/camera_pose_initializer.schema.json
new file mode 100644
index 0000000000000..26a53e0fe408f
--- /dev/null
+++ b/localization/yabloc/yabloc_pose_initializer/schema/camera_pose_initializer.schema.json
@@ -0,0 +1,33 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Parameters for camera_pose_initializer",
+ "type": "object",
+ "definitions": {
+ "camera_pose_initializer": {
+ "type": "object",
+ "properties": {
+ "angle_resolution": {
+ "type": "number",
+ "description": "how many divisions of 1 sigma angle range",
+ "default": 30
+ }
+ },
+ "required": ["angle_resolution"],
+ "additionalProperties": false
+ }
+ },
+ "properties": {
+ "/**": {
+ "type": "object",
+ "properties": {
+ "ros__parameters": {
+ "$ref": "#/definitions/camera_pose_initializer"
+ }
+ },
+ "required": ["ros__parameters"],
+ "additionalProperties": false
+ }
+ },
+ "required": ["/**"],
+ "additionalProperties": false
+}
diff --git a/map/map_projection_loader/README.md b/map/map_projection_loader/README.md
index 94b142da3dcf9..1887a1cd8934f 100644
--- a/map/map_projection_loader/README.md
+++ b/map/map_projection_loader/README.md
@@ -28,6 +28,15 @@ sample-map-rosbag
projector_type: local
```
+#### Limitation
+
+Functionalities that require latitude and longitude become unavailable.
+
+The currently identified unavailable functionalities are:
+
+- GNSS localization
+- Sending the self-position in latitude and longitude using ADAPI
+
### Using MGRS
If you want to use MGRS, please specify the MGRS grid as well.
@@ -39,6 +48,10 @@ vertical_datum: WGS84
mgrs_grid: 54SUE
```
+#### Limitation
+
+This cannot be used with maps that span two or more MGRS grids. Please use it only when the map falls within a single MGRS grid.
+
### Using LocalCartesianUTM
If you want to use local cartesian UTM, please specify the map origin as well.
diff --git a/perception/shape_estimation/launch/shape_estimation.launch.xml b/perception/shape_estimation/launch/shape_estimation.launch.xml
index 65d1944417cc0..f13b5d1e5f273 100644
--- a/perception/shape_estimation/launch/shape_estimation.launch.xml
+++ b/perception/shape_estimation/launch/shape_estimation.launch.xml
@@ -4,9 +4,6 @@
-
-
-
diff --git a/perception/simple_object_merger/README.md b/perception/simple_object_merger/README.md
index 4609638cc008c..9bcd03e8abfa1 100644
--- a/perception/simple_object_merger/README.md
+++ b/perception/simple_object_merger/README.md
@@ -1,60 +1,78 @@
# simple_object_merger
-This package can merge multiple topics of [autoware_auto_perception_msgs/msg/DetectedObject](https://gitlab.com/autowarefoundation/autoware.auto/autoware_auto_msgs/-/blob/master/autoware_auto_perception_msgs/msg/DetectedObject.idl) without data association algorithm.
+This package can merge multiple topics of [autoware_auto_perception_msgs/msg/DetectedObject](https://gitlab.com/autowarefoundation/autoware.auto/autoware_auto_msgs/-/blob/master/autoware_auto_perception_msgs/msg/DetectedObject.idl) with low calculation cost.
## Algorithm
### Background
-This package can merge multiple DetectedObjects without matching processing.
-[Object_merger](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/object_merger) solve data association algorithm like Hungarian algorithm for matching problem, but it needs computational cost.
-In addition, for now, [object_merger](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/object_merger) can handle only 2 DetectedObjects topics and cannot handle more than 2 topics in one node.
-To merge 6 DetectedObjects topics, 6 [object_merger](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/object_merger) nodes need to stand.
+[Object_merger](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/object_merger) is mainly used to merge DetectedObjects. `object_merger` has two characteristics. First, it solves the data association problem for matching with an algorithm such as the Hungarian algorithm, which incurs computational cost. Second, it can handle only two DetectedObjects topics and cannot handle more than two topics in one node, so merging six DetectedObjects topics currently requires launching six `object_merger` nodes.
-So this package aim to merge DetectedObjects simply.
-This package do not use data association algorithm to reduce the computational cost, and it can handle more than 2 topics in one node to prevent launching a large number of nodes.
+So `simple_object_merger` aims to merge multiple DetectedObjects with low calculation cost.
+The package does not use a data association algorithm in order to reduce the computational cost, and it can handle more than two topics in one node to avoid launching a large number of nodes.
+
+### Use case
+
+The use case is as follows.
+
+- Multiple radar detection
+
+`Simple_object_merger` can be used for multiple radar detection. By combining multiple radar topics into a single topic with `simple_object_merger`, the pipeline for faraway detection with radar becomes simpler.
### Limitation
- Sensor data drops and delay
-Merged objects will not be published until all topic data is received when initializing.
-In addition, to care sensor data drops and delayed, this package has a parameter to judge timeout.
-When the latest time of the data of a topic is older than the timeout parameter, it is not merged for output objects.
-For now specification of this package, if all topic data is received at first and after that the data drops, and the merged objects are published without objects which is judged as timeout.
-The timeout parameter should be determined by sensor cycle time.
+Merged objects are not published until data from all topics has been received at initialization. In addition, to handle sensor data drops and delays, this package has a parameter for timeout judgement. When the latest data of a topic is older than the timeout parameter, that topic is not merged into the output objects. Under the current specification, if all topic data is received at first and some of it drops afterwards, the merged objects are still published, excluding the objects of topics judged as timed out. The timeout parameter should be determined from the sensor cycle time.
- Post-processing
-Because this package does not have matching processing, so it can be used only when post-processing is used.
-For now, [clustering processing](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/radar_object_clustering) can be used as post-processing.
+Because this package does not perform matching, the output can contain overlapping objects depending on the input objects. Therefore the output objects should only be used together with post-processing. For now, [clustering processing](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/radar_object_clustering) can be used as post-processing.
-### Use case
+## Interface
-Use case is as below.
+### Input
-- Multiple radar detection
+Input topics are defined by the `input_topics` parameter (List[string]). Each input topic carries `autoware_auto_perception_msgs/msg/DetectedObjects`.
+
+### Output
+
+- `~/output/objects` (`autoware_auto_perception_msgs/msg/DetectedObjects.msg`)
+ - Merged objects combined from input topics.
+
+### Parameters
+
+- `update_rate_hz` (double) [hz]
+ - Default parameter: 20.0
+
+This parameter is the update rate of the `onTimer` function.
+It should be the same as the frame rate of the input topics.
-This package can be used for multiple radar detection.
-Since [clustering processing](https://github.com/autowarefoundation/autoware.universe/tree/main/perception/radar_object_clustering) will be included later process in radar faraway detection, this package can be used.
+- `new_frame_id` (string)
+ - Default parameter: "base_link"
-## Input
+This parameter is the header frame_id of the output topic.
+If the output topic is used by the perception module, it should be set to "base_link".
-| Name | Type | Description |
-| ---- | ------------------------------------------------------------------ | ------------------------------------------------------ |
-| | std::vector | 3D detected objects. Topic names are set by parameters |
+- `timeout_threshold` (double) [s]
+ - Default parameter: 0.1
-## Output
+This parameter is the threshold for timeout judgement.
+If the time difference between the first topic of `input_topics` and another input topic exceeds this threshold, the objects of that topic are not merged into the output objects.
-| Name | Type | Description |
-| ------------------ | ----------------------------------------------------- | -------------- |
-| `~/output/objects` | autoware_auto_perception_msgs/msg/DetectedObjects.msg | Merged objects |
+```cpp
+ for (size_t i = 0; i < input_topic_size; i++) {
+ double time_diff = rclcpp::Time(objects_data_.at(i)->header.stamp).seconds() -
+ rclcpp::Time(objects_data_.at(0)->header.stamp).seconds();
+ if (std::abs(time_diff) < node_param_.timeout_threshold) {
+ // merge objects
+ }
+ }
+```
-## Parameters
+- `input_topics` (List[string])
+ - Default parameter: "[]"
-| Name | Type | Description | Default value |
-| :------------------ | :----------- | :----------------------------------- | :------------ |
-| `update_rate_hz` | double | Update rate. [hz] | 20.0 |
-| `new_frame_id` | string | The header frame_id of output topic. | "base_link" |
-| `timeout_threshold` | double | Threshold for timeout judgement [s]. | 1.0 |
-| `input_topics` | List[string] | Input topics name. | "[]" |
+This parameter is the list of input topic names.
+For example, when this package is used for radar objects, `"[/sensing/radar/front_center/detected_objects, /sensing/radar/front_left/detected_objects, /sensing/radar/rear_left/detected_objects, /sensing/radar/rear_center/detected_objects, /sensing/radar/rear_right/detected_objects, /sensing/radar/front_right/detected_objects]"` can be set.
+For now, the time difference is calculated from the header stamps between the first topic of `input_topics` and each of the other input topics, so the most important objects to detect should be set as the first entry of the `input_topics` list.
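+
+As a reference, a minimal parameter file for this node might look as follows. This is only a sketch: the `/**` wildcard is the generic ROS 2 node matcher, the scalar values repeat the defaults listed above, and the topic names follow the radar example above.
+
+```yaml
+/**:
+  ros__parameters:
+    update_rate_hz: 20.0
+    new_frame_id: "base_link"
+    timeout_threshold: 0.1
+    input_topics:
+      - "/sensing/radar/front_center/detected_objects"
+      - "/sensing/radar/front_left/detected_objects"
+      - "/sensing/radar/rear_left/detected_objects"
+      - "/sensing/radar/rear_center/detected_objects"
+      - "/sensing/radar/rear_right/detected_objects"
+      - "/sensing/radar/front_right/detected_objects"
+```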
diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp
index 9c3c371627a78..f991ca0d849f0 100644
--- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp
+++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/scene.hpp
@@ -89,7 +89,7 @@ class NormalLaneChange : public LaneChangeBase
bool isAbortState() const override;
- bool isLaneChangeRequired() const override;
+ bool isLaneChangeRequired() override;
bool isStoppedAtRedTrafficLight() const override;
diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp
index 451899fbf27e6..400d5505dc49f 100644
--- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp
+++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/base_class.hpp
@@ -80,7 +80,7 @@ class LaneChangeBase
virtual bool hasFinishedAbort() const = 0;
- virtual bool isLaneChangeRequired() const = 0;
+ virtual bool isLaneChangeRequired() = 0;
virtual bool isAbortState() const = 0;
diff --git a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/path.hpp b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/path.hpp
index 9e42b49635b82..3af4f487a0fa0 100644
--- a/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/path.hpp
+++ b/planning/behavior_path_lane_change_module/include/behavior_path_lane_change_module/utils/path.hpp
@@ -45,7 +45,7 @@ struct LaneChangeStatus
std::vector lane_follow_lane_ids{};
std::vector lane_change_lane_ids{};
bool is_safe{false};
- bool is_valid_path{true};
+ bool is_valid_path{false};
double start_distance{0.0};
};
diff --git a/planning/behavior_path_lane_change_module/src/interface.cpp b/planning/behavior_path_lane_change_module/src/interface.cpp
index 448f83ecaf15e..c8e1229fe5af5 100644
--- a/planning/behavior_path_lane_change_module/src/interface.cpp
+++ b/planning/behavior_path_lane_change_module/src/interface.cpp
@@ -50,9 +50,6 @@ LaneChangeInterface::LaneChangeInterface(
void LaneChangeInterface::processOnEntry()
{
waitApproval();
- module_type_->setPreviousModulePaths(
- getPreviousModuleOutput().reference_path, getPreviousModuleOutput().path);
- module_type_->updateLaneChangeStatus();
}
void LaneChangeInterface::processOnExit()
@@ -80,6 +77,14 @@ void LaneChangeInterface::updateData()
{
module_type_->setPreviousModulePaths(
getPreviousModuleOutput().reference_path, getPreviousModuleOutput().path);
+ module_type_->setPreviousDrivableAreaInfo(getPreviousModuleOutput().drivable_area_info);
+ module_type_->setPreviousTurnSignalInfo(getPreviousModuleOutput().turn_signal_info);
+
+ if (isWaitingApproval()) {
+ module_type_->updateLaneChangeStatus();
+ }
+ setObjectDebugVisualization();
+
module_type_->updateSpecialData();
module_type_->resetStopPose();
}
@@ -98,8 +103,6 @@ BehaviorModuleOutput LaneChangeInterface::plan()
return {};
}
- module_type_->setPreviousDrivableAreaInfo(getPreviousModuleOutput().drivable_area_info);
- module_type_->setPreviousTurnSignalInfo(getPreviousModuleOutput().turn_signal_info);
auto output = module_type_->generateOutput();
path_reference_ = std::make_shared(output.reference_path);
*prev_approved_path_ = getPreviousModuleOutput().path;
@@ -128,22 +131,14 @@ BehaviorModuleOutput LaneChangeInterface::planWaitingApproval()
out.reference_path = getPreviousModuleOutput().reference_path;
out.turn_signal_info = getPreviousModuleOutput().turn_signal_info;
out.drivable_area_info = getPreviousModuleOutput().drivable_area_info;
-
- module_type_->setPreviousModulePaths(
- getPreviousModuleOutput().reference_path, getPreviousModuleOutput().path);
- module_type_->updateLaneChangeStatus();
- setObjectDebugVisualization();
+ out.turn_signal_info = getCurrentTurnSignalInfo(out.path, out.turn_signal_info);
for (const auto & [uuid, data] : module_type_->getDebugData()) {
const auto color = data.is_safe ? ColorName::GREEN : ColorName::RED;
setObjectsOfInterestData(data.current_obj_pose, data.obj_shape, color);
}
- // change turn signal when the vehicle reaches at the end of the path for waiting lane change
- out.turn_signal_info = getCurrentTurnSignalInfo(out.path, out.turn_signal_info);
-
path_reference_ = std::make_shared(getPreviousModuleOutput().reference_path);
-
stop_pose_ = module_type_->getStopPose();
if (!module_type_->isValidPath()) {
@@ -211,6 +206,7 @@ bool LaneChangeInterface::canTransitSuccessState()
}
if (module_type_->hasFinishedLaneChange()) {
+ module_type_->resetParameters();
log_debug_throttled("Lane change process has completed.");
return true;
}
diff --git a/planning/behavior_path_lane_change_module/src/scene.cpp b/planning/behavior_path_lane_change_module/src/scene.cpp
index c7ddd8fe63636..f5100f16129c2 100644
--- a/planning/behavior_path_lane_change_module/src/scene.cpp
+++ b/planning/behavior_path_lane_change_module/src/scene.cpp
@@ -115,17 +115,17 @@ std::pair NormalLaneChange::getSafePath(LaneChangePath & safe_path)
return {true, found_safe_path};
}
-bool NormalLaneChange::isLaneChangeRequired() const
+bool NormalLaneChange::isLaneChangeRequired()
{
- const auto current_lanes = getCurrentLanes();
+ status_.current_lanes = getCurrentLanes();
- if (current_lanes.empty()) {
+ if (status_.current_lanes.empty()) {
return false;
}
- const auto target_lanes = getLaneChangeLanes(current_lanes, direction_);
+ status_.target_lanes = getLaneChangeLanes(status_.current_lanes, direction_);
- return !target_lanes.empty();
+ return !status_.target_lanes.empty();
}
bool NormalLaneChange::isStoppedAtRedTrafficLight() const
@@ -420,8 +420,15 @@ void NormalLaneChange::resetParameters()
is_abort_approval_requested_ = false;
current_lane_change_state_ = LaneChangeStates::Normal;
abort_path_ = nullptr;
+ status_ = {};
object_debug_.clear();
+ object_debug_after_approval_.clear();
+ debug_filtered_objects_.current_lane.clear();
+ debug_filtered_objects_.target_lane.clear();
+ debug_filtered_objects_.other_lane.clear();
+ debug_valid_path_.clear();
+ RCLCPP_DEBUG(logger_, "reset all flags and debug information.");
}
TurnSignalInfo NormalLaneChange::updateOutputTurnSignal()
diff --git a/planning/behavior_velocity_crosswalk_module/config/crosswalk.param.yaml b/planning/behavior_velocity_crosswalk_module/config/crosswalk.param.yaml
index aa5dfed1e4bc5..29079b99b6718 100644
--- a/planning/behavior_velocity_crosswalk_module/config/crosswalk.param.yaml
+++ b/planning/behavior_velocity_crosswalk_module/config/crosswalk.param.yaml
@@ -30,7 +30,7 @@
enable_stuck_check_in_intersection: false # [-] flag to enable stuck vehicle checking for crosswalk which is in intersection
stuck_vehicle_velocity: 1.0 # [m/s] threshold velocity whether other vehicles are assumed to be stuck or not.
max_stuck_vehicle_lateral_offset: 2.0 # [m] The feature does not handle the vehicles farther than this value to the ego's path.
- stuck_vehicle_attention_range: 10.0 # [m] Ego does not enter the crosswalk area if a stuck vehicle exists within this distance from the end of the crosswalk on the ego's path.
+ required_clearance: 6.0 # [m] clearance to be secured between the ego and the ahead vehicle
min_acc: -1.0 # min acceleration [m/ss]
min_jerk: -1.0 # min jerk [m/sss]
max_jerk: 1.0 # max jerk [m/sss]
diff --git a/planning/behavior_velocity_crosswalk_module/src/manager.cpp b/planning/behavior_velocity_crosswalk_module/src/manager.cpp
index dc3ce32be8505..094369b0a7a3c 100644
--- a/planning/behavior_velocity_crosswalk_module/src/manager.cpp
+++ b/planning/behavior_velocity_crosswalk_module/src/manager.cpp
@@ -68,8 +68,8 @@ CrosswalkModuleManager::CrosswalkModuleManager(rclcpp::Node & node)
getOrDeclareParameter(node, ns + ".stuck_vehicle.stuck_vehicle_velocity");
cp.max_stuck_vehicle_lateral_offset =
getOrDeclareParameter(node, ns + ".stuck_vehicle.max_stuck_vehicle_lateral_offset");
- cp.stuck_vehicle_attention_range =
- getOrDeclareParameter(node, ns + ".stuck_vehicle.stuck_vehicle_attention_range");
+ cp.required_clearance =
+ getOrDeclareParameter(node, ns + ".stuck_vehicle.required_clearance");
cp.min_acc_for_stuck_vehicle = getOrDeclareParameter(node, ns + ".stuck_vehicle.min_acc");
cp.max_jerk_for_stuck_vehicle =
getOrDeclareParameter(node, ns + ".stuck_vehicle.max_jerk");
diff --git a/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.cpp b/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.cpp
index 48ae5ff03b8bd..cee6975df3155 100644
--- a/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.cpp
+++ b/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.cpp
@@ -906,48 +906,48 @@ std::optional CrosswalkModule::checkStopForStuckVehicles(
continue;
}
- const auto & obj_pos = object.kinematics.initial_pose_with_covariance.pose.position;
- const auto lateral_offset = calcLateralOffset(ego_path.points, obj_pos);
+ const auto & obj_pose = object.kinematics.initial_pose_with_covariance.pose;
+ const auto lateral_offset = calcLateralOffset(ego_path.points, obj_pose.position);
if (p.max_stuck_vehicle_lateral_offset < std::abs(lateral_offset)) {
continue;
}
- const auto & ego_pos = planner_data_->current_odometry->pose.position;
- const auto ego_vel = planner_data_->current_velocity->twist.linear.x;
- const auto ego_acc = planner_data_->current_acceleration->accel.accel.linear.x;
-
- const double near_attention_range =
- calcSignedArcLength(ego_path.points, ego_pos, path_intersects.back());
- const double far_attention_range = near_attention_range + p.stuck_vehicle_attention_range;
-
- const auto dist_ego2obj = calcSignedArcLength(ego_path.points, ego_pos, obj_pos);
-
- if (near_attention_range < dist_ego2obj && dist_ego2obj < far_attention_range) {
- // Plan STOP considering min_acc, max_jerk and min_jerk.
- const auto min_feasible_dist_ego2stop = calcDecelDistWithJerkAndAccConstraints(
- ego_vel, 0.0, ego_acc, p.min_acc_for_stuck_vehicle, p.max_jerk_for_stuck_vehicle,
- p.min_jerk_for_stuck_vehicle);
- if (!min_feasible_dist_ego2stop) {
- continue;
+ // check if STOP is required
+ const double crosswalk_front_to_obj_rear =
+ calcSignedArcLength(ego_path.points, path_intersects.front(), obj_pose.position) -
+ object.shape.dimensions.x / 2.0;
+ const double crosswalk_back_to_obj_rear =
+ calcSignedArcLength(ego_path.points, path_intersects.back(), obj_pose.position) -
+ object.shape.dimensions.x / 2.0;
+ const double required_space_length =
+ planner_data_->vehicle_info_.vehicle_length_m + planner_param_.required_clearance;
+
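+    // Stop if the object's rear end is beyond the crosswalk entry while the space beyond the
+    // crosswalk exit is smaller than the ego length plus the required clearance.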
+ if (crosswalk_front_to_obj_rear > 0.0 && crosswalk_back_to_obj_rear < required_space_length) {
+      // If there exists at least one vehicle ahead, plan STOP considering min_acc, max_jerk and
+      // min_jerk. Note that a nearest-object search is not required because the stop pose is
+      // independent of the vehicles.
+ const auto braking_distance = calcDecelDistWithJerkAndAccConstraints(
+ planner_data_->current_velocity->twist.linear.x, 0.0,
+ planner_data_->current_acceleration->accel.accel.linear.x, p.min_acc_for_stuck_vehicle,
+ p.max_jerk_for_stuck_vehicle, p.min_jerk_for_stuck_vehicle);
+ if (!braking_distance) {
+ return {};
}
+ const auto & ego_pos = planner_data_->current_odometry->pose.position;
const double dist_ego2stop =
calcSignedArcLength(ego_path.points, ego_pos, stop_pose->position);
- const double feasible_dist_ego2stop = std::max(*min_feasible_dist_ego2stop, dist_ego2stop);
+ const double feasible_dist_ego2stop = std::max(*braking_distance, dist_ego2stop);
const double dist_to_ego =
calcSignedArcLength(ego_path.points, ego_path.points.front().point.pose.position, ego_pos);
-
const auto feasible_stop_pose =
calcLongitudinalOffsetPose(ego_path.points, 0, dist_to_ego + feasible_dist_ego2stop);
if (!feasible_stop_pose) {
- continue;
+ return {};
}
- setObjectsOfInterestData(
- object.kinematics.initial_pose_with_covariance.pose, object.shape, ColorName::RED);
-
- // early return may not appropriate because the nearest in range object should be handled
- return createStopFactor(*feasible_stop_pose, {obj_pos});
+ setObjectsOfInterestData(obj_pose, object.shape, ColorName::RED);
+ return createStopFactor(*feasible_stop_pose, {obj_pose.position});
}
}
diff --git a/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.hpp b/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.hpp
index 247907ad2dc80..ed1e342df9f7a 100644
--- a/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.hpp
+++ b/planning/behavior_velocity_crosswalk_module/src/scene_crosswalk.hpp
@@ -124,7 +124,7 @@ class CrosswalkModule : public SceneModuleInterface
bool enable_stuck_check_in_intersection{false};
double stuck_vehicle_velocity;
double max_stuck_vehicle_lateral_offset;
- double stuck_vehicle_attention_range;
+ double required_clearance;
double min_acc_for_stuck_vehicle;
double max_jerk_for_stuck_vehicle;
double min_jerk_for_stuck_vehicle;
diff --git a/sensing/gnss_poser/schema/gnss_poser.schema.json b/sensing/gnss_poser/schema/gnss_poser.schema.json
index 6d9bc716e121a..d6e104b7432f2 100644
--- a/sensing/gnss_poser/schema/gnss_poser.schema.json
+++ b/sensing/gnss_poser/schema/gnss_poser.schema.json
@@ -42,7 +42,6 @@
},
"required": [
"base_frame",
- "gnss_frame",
"gnss_base_frame",
"map_frame",
"use_gnss_ins_orientation",
diff --git a/sensing/pointcloud_preprocessor/docs/ring-outlier-filter.md b/sensing/pointcloud_preprocessor/docs/ring-outlier-filter.md
index eedc8e6bcb573..42ca694235a00 100644
--- a/sensing/pointcloud_preprocessor/docs/ring-outlier-filter.md
+++ b/sensing/pointcloud_preprocessor/docs/ring-outlier-filter.md
@@ -22,12 +22,13 @@ This implementation inherits `pointcloud_preprocessor::Filter` class, please ref
### Core Parameters
-| Name | Type | Default Value | Description |
-| ------------------------- | ------- | ------------- | ----------- |
-| `distance_ratio` | double | 1.03 | |
-| `object_length_threshold` | double | 0.1 | |
-| `num_points_threshold` | int | 4 | |
-| `max_rings_num` | uint_16 | 128 | |
+| Name | Type | Default Value | Description |
+| ------------------------- | ------- | ------------- | ----------------------------------------------------------------------------------- |
+| `distance_ratio` | double | 1.03 | |
+| `object_length_threshold` | double | 0.1 | |
+| `num_points_threshold` | int | 4 | |
+| `max_rings_num` | uint_16 | 128 | |
+| `max_points_num_per_ring` | size_t | 4000 | Set this value large enough such that `HFoV / resolution < max_points_num_per_ring` |
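+
+For example, a LiDAR with a 360 deg horizontal FoV and an azimuth resolution of 0.1 deg produces at most 3600 points per ring, so the default of 4000 satisfies this condition.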
## Assumptions / Known limits
diff --git a/sensing/pointcloud_preprocessor/include/pointcloud_preprocessor/outlier_filter/ring_outlier_filter_nodelet.hpp b/sensing/pointcloud_preprocessor/include/pointcloud_preprocessor/outlier_filter/ring_outlier_filter_nodelet.hpp
index f655a9245ca6d..ab7d4e0012304 100644
--- a/sensing/pointcloud_preprocessor/include/pointcloud_preprocessor/outlier_filter/ring_outlier_filter_nodelet.hpp
+++ b/sensing/pointcloud_preprocessor/include/pointcloud_preprocessor/outlier_filter/ring_outlier_filter_nodelet.hpp
@@ -46,6 +46,7 @@ class RingOutlierFilterComponent : public pointcloud_preprocessor::Filter
double object_length_threshold_;
int num_points_threshold_;
uint16_t max_rings_num_;
+ size_t max_points_num_per_ring_;
/** \brief Parameter service callback result : needed to be hold */
OnSetParametersCallbackHandle::SharedPtr set_param_res_;
@@ -53,6 +54,21 @@ class RingOutlierFilterComponent : public pointcloud_preprocessor::Filter
/** \brief Parameter service callback */
rcl_interfaces::msg::SetParametersResult paramCallback(const std::vector & p);
+ bool isCluster(
+ const PointCloud2ConstPtr & input, std::pair data_idx_both_ends, int walk_size)
+ {
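+    // A walk is judged to be a cluster if it contains more than num_points_threshold_ points,
+    // or if its first and last points are farther apart than object_length_threshold_.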
+ if (walk_size > num_points_threshold_) return true;
+
+ auto first_point = reinterpret_cast(&input->data[data_idx_both_ends.first]);
+ auto last_point = reinterpret_cast(&input->data[data_idx_both_ends.second]);
+
+ const auto x = first_point->x - last_point->x;
+ const auto y = first_point->y - last_point->y;
+ const auto z = first_point->z - last_point->z;
+
+ return x * x + y * y + z * z >= object_length_threshold_ * object_length_threshold_;
+ }
+
public:
PCL_MAKE_ALIGNED_OPERATOR_NEW
explicit RingOutlierFilterComponent(const rclcpp::NodeOptions & options);
diff --git a/sensing/pointcloud_preprocessor/package.xml b/sensing/pointcloud_preprocessor/package.xml
index e3090f34d6854..a65d14c0a1194 100644
--- a/sensing/pointcloud_preprocessor/package.xml
+++ b/sensing/pointcloud_preprocessor/package.xml
@@ -31,7 +31,6 @@
pcl_msgs
pcl_ros
point_cloud_msg_wrapper
- range-v3
rclcpp
rclcpp_components
sensor_msgs
diff --git a/sensing/pointcloud_preprocessor/src/outlier_filter/ring_outlier_filter_nodelet.cpp b/sensing/pointcloud_preprocessor/src/outlier_filter/ring_outlier_filter_nodelet.cpp
index e277b221a090d..d2570b9c4d786 100644
--- a/sensing/pointcloud_preprocessor/src/outlier_filter/ring_outlier_filter_nodelet.cpp
+++ b/sensing/pointcloud_preprocessor/src/outlier_filter/ring_outlier_filter_nodelet.cpp
@@ -14,18 +14,10 @@
#include "pointcloud_preprocessor/outlier_filter/ring_outlier_filter_nodelet.hpp"
-#include "pointcloud_preprocessor/utility/utilities.hpp"
-
-#include
-#include
-#include
-
#include
#include
-#include
#include
-
namespace pointcloud_preprocessor
{
RingOutlierFilterComponent::RingOutlierFilterComponent(const rclcpp::NodeOptions & options)
@@ -48,6 +40,8 @@ RingOutlierFilterComponent::RingOutlierFilterComponent(const rclcpp::NodeOptions
static_cast(declare_parameter("object_length_threshold", 0.1));
num_points_threshold_ = static_cast(declare_parameter("num_points_threshold", 4));
max_rings_num_ = static_cast(declare_parameter("max_rings_num", 128));
+ max_points_num_per_ring_ =
+ static_cast(declare_parameter("max_points_num_per_ring", 4000));
}
using std::placeholders::_1;
@@ -67,252 +61,122 @@ void RingOutlierFilterComponent::faster_filter(
}
stop_watch_ptr_->toc("processing_time", true);
- // The ring_outlier_filter specifies the expected input point cloud format,
- // however, we want to verify the input is correct and make failures explicit.
- auto getFieldOffsetSafely = [=](
- const std::string & field_name,
- const pcl::PCLPointField::PointFieldTypes expected_type) -> size_t {
- const auto field_index = pcl::getFieldIndex(*input, field_name);
- if (field_index == -1) {
- RCLCPP_ERROR(get_logger(), "Field %s not found in input point cloud", field_name.c_str());
- return -1UL;
- }
-
- auto field = (*input).fields.at(field_index);
- if (field.datatype != expected_type) {
- RCLCPP_ERROR(
- get_logger(), "Field %s has unexpected type %d (expected %d)", field_name.c_str(),
- field.datatype, expected_type);
- return -1UL;
- }
-
- return static_cast(field.offset);
- };
+ output.point_step = sizeof(PointXYZI);
+ output.data.resize(output.point_step * input->width);
+ size_t output_size = 0;
- // as per the specification of this node, these fields must be present in the input
- const auto ring_offset = getFieldOffsetSafely("ring", pcl::PCLPointField::UINT16);
- const auto azimuth_offset = getFieldOffsetSafely("azimuth", pcl::PCLPointField::FLOAT32);
- const auto distance_offset = getFieldOffsetSafely("distance", pcl::PCLPointField::FLOAT32);
- const auto intensity_offset = getFieldOffsetSafely("intensity", pcl::PCLPointField::FLOAT32);
+ const auto ring_offset =
+ input->fields.at(static_cast(autoware_point_types::PointIndex::Ring)).offset;
+ const auto azimuth_offset =
+ input->fields.at(static_cast(autoware_point_types::PointIndex::Azimuth)).offset;
+ const auto distance_offset =
+ input->fields.at(static_cast(autoware_point_types::PointIndex::Distance)).offset;
+ const auto intensity_offset =
+ input->fields.at(static_cast(autoware_point_types::PointIndex::Intensity)).offset;
+
+ std::vector> ring2indices;
+ ring2indices.reserve(max_rings_num_);
+
+ for (uint16_t i = 0; i < max_rings_num_; i++) {
+ ring2indices.push_back(std::vector());
+ ring2indices.back().reserve(max_points_num_per_ring_);
+ }
- if (
- ring_offset == -1UL || azimuth_offset == -1UL || distance_offset == -1UL ||
- intensity_offset == -1UL) {
- RCLCPP_ERROR(get_logger(), "One or more required fields are missing in input point cloud");
- return;
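+  // Group the data indices of the input points by their ring index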
+ for (size_t data_idx = 0; data_idx < input->data.size(); data_idx += input->point_step) {
+ const uint16_t ring = *reinterpret_cast(&input->data[data_idx + ring_offset]);
+ ring2indices[ring].push_back(data_idx);
}
- // The initial implementation of ring outlier filter looked like this:
- // 1. Iterate over the input cloud and group point indices by ring
- // 2. For each ring:
- // 2.1. iterate over the ring points, and group points belonging to the same "walk"
- // 2.2. when a walk is closed, copy indexed points to the output cloud if the walk is long
- // enough.
- //
- // Because LIDAR data is naturally "azimuth-major order" and not "ring-major order", such
- // implementation is not cache friendly at all, and has negative impact on all the other nodes.
- //
- // To tackle this issue, the algorithm has been rewritten so that points would be accessed in
- // order. To do so, all rings are being processing simultaneously instead of separately. The
- // overall logic is still the same.
-
- // ad-hoc struct to store finished walks information (for isCluster())
- struct WalkInfo
- {
- size_t id;
- int num_points;
- float first_point_distance;
- float first_point_azimuth;
- float last_point_distance;
- float last_point_azimuth;
- };
-
- // ad-hoc struct to keep track of each ring current walk
- struct RingWalkInfo
- {
- WalkInfo first_walk;
- WalkInfo current_walk;
- };
-
- // helper functions
-
- // check if walk is a valid cluster
- const float object_length_threshold2 = object_length_threshold_ * object_length_threshold_;
- auto isCluster = [=](const WalkInfo & walk_info) {
- // A cluster is a walk which has many points or is long enough
- if (walk_info.num_points > num_points_threshold_) return true;
-
- // Using the law of cosines, the distance between 2 points can be written as:
- // |p2-p1|^2 = d1^2 + d2^2 - 2*d1*d2*cos(a)
- // where d1 and d2 are the distance attribute of p1 and p2, and 'a' the azimuth diff between
- // the 2 points.
- const float dist2 =
- walk_info.first_point_distance * walk_info.first_point_distance +
- walk_info.last_point_distance * walk_info.last_point_distance -
- 2 * walk_info.first_point_distance * walk_info.last_point_distance *
- std::cos((walk_info.last_point_azimuth - walk_info.first_point_azimuth) * (M_PI / 18000.0));
- return dist2 > object_length_threshold2;
- };
-
- // check if 2 points belong to the same walk
- auto isSameWalk =
- [=](float curr_distance, float curr_azimuth, float prev_distance, float prev_azimuth) {
- float azimuth_diff = curr_azimuth - prev_azimuth;
- azimuth_diff = azimuth_diff < 0.f ? azimuth_diff + 36000.f : azimuth_diff;
- return std::max(curr_distance, prev_distance) <
- std::min(curr_distance, prev_distance) * distance_ratio_ &&
- azimuth_diff < 100.f;
- };
-
- // tmp vectors to keep track of walk/ring state while processing points in order (cache efficient)
- std::vector rings; // info for each LiDAR ring
- std::vector points_walk_id; // for each input point, the walk index associated with it
- std::vector
- walks_cluster_status; // for each generated walk, stores whether it is a cluster
-
- size_t latest_walk_id = -1UL; // ID given to the latest walk created
-
- // initialize each ring with two empty walks (first and current walk)
- rings.resize(max_rings_num_, RingWalkInfo{{-1UL, 0, 0, 0, 0, 0}, {-1UL, 0, 0, 0, 0, 0}});
- // points are initially associated to no walk (-1UL)
- points_walk_id.resize(input->width * input->height, -1UL);
- walks_cluster_status.reserve(
- max_rings_num_ * 2); // In the worst case, this could grow to the number of input points
-
- int invalid_ring_count = 0;
-
- // Build walks and classify points
- for (const auto & [raw_p, point_walk_id] :
- ranges::views::zip(input->data | ranges::views::chunk(input->point_step), points_walk_id)) {
- uint16_t ring_idx{};
- float curr_azimuth{};
- float curr_distance{};
- std::memcpy(&ring_idx, &raw_p.data()[ring_offset], sizeof(ring_idx));
- std::memcpy(&curr_azimuth, &raw_p.data()[azimuth_offset], sizeof(curr_azimuth));
- std::memcpy(&curr_distance, &raw_p.data()[distance_offset], sizeof(curr_distance));
-
- if (ring_idx >= max_rings_num_) {
- // Either the data is corrupted or max_rings_num_ is not set correctly
- // Note: point_walk_id == -1 so the point will be filtered out
- ++invalid_ring_count;
- continue;
- }
+ // walk range: [walk_first_idx, walk_last_idx]
+ int walk_first_idx = 0;
+ int walk_last_idx = -1;
- auto & ring = rings[ring_idx];
- if (ring.current_walk.id == -1UL) {
- // first walk ever for this ring. It is both the first and current walk of the ring.
- ring.first_walk =
- WalkInfo{++latest_walk_id, 1, curr_distance, curr_azimuth, curr_distance, curr_azimuth};
- ring.current_walk = ring.first_walk;
- point_walk_id = latest_walk_id;
- continue;
- }
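+  // For each ring, split consecutive points into walks and keep only points of walks judged as clusters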
+ for (const auto & indices : ring2indices) {
+ if (indices.size() < 2) continue;
- auto & walk = ring.current_walk;
- if (isSameWalk(
- curr_distance, curr_azimuth, walk.last_point_distance, walk.last_point_azimuth)) {
- // current point is part of previous walk
- walk.num_points += 1;
- walk.last_point_distance = curr_distance;
- walk.last_point_azimuth = curr_azimuth;
- point_walk_id = walk.id;
- } else {
- // previous walk is finished, start a new one
-
- // check and store whether the previous walks is a cluster
- if (walk.id >= walks_cluster_status.size()) walks_cluster_status.resize(walk.id + 1, false);
- walks_cluster_status.at(walk.id) = isCluster(walk);
-
- ring.current_walk =
- WalkInfo{++latest_walk_id, 1, curr_distance, curr_azimuth, curr_distance, curr_azimuth};
- point_walk_id = latest_walk_id;
- }
- }
+ walk_first_idx = 0;
+ walk_last_idx = -1;
- // So far, we have processed ring points as if rings were not circular. Of course, the last and
- // first points of a ring could totally be part of the same walk. When such thing happens, we need
- // to merge the two walks
- for (auto & ring : rings) {
- if (ring.current_walk.id == -1UL) {
- continue;
- }
+ for (size_t idx = 0U; idx < indices.size() - 1; ++idx) {
+ const size_t & current_data_idx = indices[idx];
+ const size_t & next_data_idx = indices[idx + 1];
+ walk_last_idx = idx;
- const auto & walk = ring.current_walk;
- if (walk.id >= walks_cluster_status.size()) walks_cluster_status.resize(walk.id + 1, false);
- walks_cluster_status.at(walk.id) = isCluster(walk);
+ // if(std::abs(iter->distance - (iter+1)->distance) <= std::sqrt(iter->distance) * 0.08)
- if (ring.first_walk.id == ring.current_walk.id) {
- continue;
- }
+ const float & current_azimuth =
+ *reinterpret_cast(&input->data[current_data_idx + azimuth_offset]);
+ const float & next_azimuth =
+ *reinterpret_cast(&input->data[next_data_idx + azimuth_offset]);
+ float azimuth_diff = next_azimuth - current_azimuth;
+ azimuth_diff = azimuth_diff < 0.f ? azimuth_diff + 36000.f : azimuth_diff;
- auto & first_walk = ring.first_walk;
- auto & last_walk = ring.current_walk;
-
- // check if the two walks are connected
- if (isSameWalk(
- first_walk.first_point_distance, first_walk.first_point_azimuth,
- last_walk.last_point_distance, last_walk.last_point_azimuth)) {
- // merge
- auto combined_num_points = first_walk.num_points + last_walk.num_points;
- first_walk.first_point_distance = last_walk.first_point_distance;
- first_walk.first_point_azimuth = last_walk.first_point_azimuth;
- first_walk.num_points = combined_num_points;
- last_walk.last_point_distance = first_walk.last_point_distance;
- last_walk.last_point_azimuth = first_walk.last_point_azimuth;
- last_walk.num_points = combined_num_points;
-
- walks_cluster_status.at(first_walk.id) = isCluster(first_walk);
- walks_cluster_status.at(last_walk.id) = isCluster(last_walk);
- }
- }
+ const float & current_distance =
+ *reinterpret_cast(&input->data[current_data_idx + distance_offset]);
+ const float & next_distance =
+ *reinterpret_cast(&input->data[next_data_idx + distance_offset]);
- // finally copy points
- output.point_step = sizeof(PointXYZI);
- output.data.resize(output.point_step * input->width * input->height);
- size_t output_size = 0;
- if (transform_info.need_transform) {
- for (const auto & [raw_p, point_walk_id] : ranges::views::zip(
- input->data | ranges::views::chunk(input->point_step), points_walk_id)) {
- // Filter out points on invalid rings and points not in a cluster
- if (point_walk_id == -1UL || !walks_cluster_status.at(point_walk_id)) {
- continue;
+ if (
+ std::max(current_distance, next_distance) <
+ std::min(current_distance, next_distance) * distance_ratio_ &&
+ azimuth_diff < 100.f) {
+ continue; // Determined to be included in the same walk
}
- // assume binary representation of input point is compatible with PointXYZ*
- PointXYZI out_point;
- std::memcpy(&out_point, raw_p.data(), sizeof(PointXYZI));
-
- Eigen::Vector4f p(out_point.x, out_point.y, out_point.z, 1);
- p = transform_info.eigen_transform * p;
- out_point.x = p[0];
- out_point.y = p[1];
- out_point.z = p[2];
- // FIXME(VRichardJP) tmp fix because input data does not have the same layout than PointXYZI
- std::memcpy(
- &out_point.intensity, &raw_p.data()[intensity_offset], sizeof(out_point.intensity));
-
- std::memcpy(&output.data[output_size], &out_point, sizeof(PointXYZI));
- output_size += sizeof(PointXYZI);
- }
- } else {
- for (const auto & [raw_p, point_walk_id] : ranges::views::zip(
- input->data | ranges::views::chunk(input->point_step), points_walk_id)) {
- // Filter out points on invalid rings and points not in a cluster
- if (point_walk_id == -1UL || !walks_cluster_status.at(point_walk_id)) {
- continue;
+ if (isCluster(
+ input, std::make_pair(indices[walk_first_idx], indices[walk_last_idx]),
+ walk_last_idx - walk_first_idx + 1)) {
+ for (int i = walk_first_idx; i <= walk_last_idx; i++) {
+ auto output_ptr = reinterpret_cast(&output.data[output_size]);
+ auto input_ptr = reinterpret_cast(&input->data[indices[i]]);
+
+ if (transform_info.need_transform) {
+ Eigen::Vector4f p(input_ptr->x, input_ptr->y, input_ptr->z, 1);
+ p = transform_info.eigen_transform * p;
+ output_ptr->x = p[0];
+ output_ptr->y = p[1];
+ output_ptr->z = p[2];
+ } else {
+ *output_ptr = *input_ptr;
+ }
+ const float & intensity =
+ *reinterpret_cast(&input->data[indices[i] + intensity_offset]);
+ output_ptr->intensity = intensity;
+
+ output_size += output.point_step;
+ }
}
- PointXYZI out_point;
- std::memcpy(&out_point, raw_p.data(), sizeof(PointXYZI));
- // FIXME(VRichardJP) tmp fix because input data does not have the same layout than PointXYZI
- std::memcpy(
- &out_point.intensity, &raw_p.data()[intensity_offset], sizeof(out_point.intensity));
-
- std::memcpy(&output.data[output_size], &out_point, sizeof(PointXYZI));
+ walk_first_idx = idx + 1;
+ }
- output_size += sizeof(PointXYZI);
+ if (walk_first_idx > walk_last_idx) continue;
+
+ if (isCluster(
+ input, std::make_pair(indices[walk_first_idx], indices[walk_last_idx]),
+ walk_last_idx - walk_first_idx + 1)) {
+ for (int i = walk_first_idx; i <= walk_last_idx; i++) {
+ auto output_ptr = reinterpret_cast(&output.data[output_size]);
+ auto input_ptr = reinterpret_cast(&input->data[indices[i]]);
+
+ if (transform_info.need_transform) {
+ Eigen::Vector4f p(input_ptr->x, input_ptr->y, input_ptr->z, 1);
+ p = transform_info.eigen_transform * p;
+ output_ptr->x = p[0];
+ output_ptr->y = p[1];
+ output_ptr->z = p[2];
+ } else {
+ *output_ptr = *input_ptr;
+ }
+ const float & intensity =
+ *reinterpret_cast(&input->data[indices[i] + intensity_offset]);
+ output_ptr->intensity = intensity;
+
+ output_size += output.point_step;
+ }
}
}
+
output.data.resize(output_size);
// Note that `input->header.frame_id` is data before converted when `transform_info.need_transform
@@ -332,12 +196,6 @@ void RingOutlierFilterComponent::faster_filter(
sensor_msgs::msg::PointField::FLOAT32, "z", 1, sensor_msgs::msg::PointField::FLOAT32,
"intensity", 1, sensor_msgs::msg::PointField::FLOAT32);
- if (invalid_ring_count > 0) {
- RCLCPP_WARN(
- get_logger(), "%d points had ring index over max_rings_num (%d) and have been ignored.",
- invalid_ring_count, max_rings_num_);
- }
-
// add processing time for debug
if (debug_publisher_) {
const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true);