diff --git a/pr-465/404.html b/pr-465/404.html index 6dd5dd59683..459a6a2d438 100644 --- a/pr-465/404.html +++ b/pr-465/404.html @@ -2101,6 +2101,8 @@ + + @@ -2261,6 +2263,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/autoware-competitions/index.html b/pr-465/autoware-competitions/index.html index 63e35a49112..192eddb029c 100644 --- a/pr-465/autoware-competitions/index.html +++ b/pr-465/autoware-competitions/index.html @@ -2112,6 +2112,8 @@ + + @@ -2272,6 +2274,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/index.html b/pr-465/contributing/coding-guidelines/index.html index 90bb353c6cc..cbe9a76a481 100644 --- a/pr-465/contributing/coding-guidelines/index.html +++ b/pr-465/contributing/coding-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/cmake/index.html b/pr-465/contributing/coding-guidelines/languages/cmake/index.html index 577e983d100..f2f655c87db 100644 --- a/pr-465/contributing/coding-guidelines/languages/cmake/index.html +++ b/pr-465/contributing/coding-guidelines/languages/cmake/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/cpp/index.html b/pr-465/contributing/coding-guidelines/languages/cpp/index.html index fac1b2ee89e..9d1fd0ca8f6 100644 --- a/pr-465/contributing/coding-guidelines/languages/cpp/index.html +++ b/pr-465/contributing/coding-guidelines/languages/cpp/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/docker/index.html b/pr-465/contributing/coding-guidelines/languages/docker/index.html index e6a0ab850e7..2ccfcaca106 100644 --- a/pr-465/contributing/coding-guidelines/languages/docker/index.html +++ b/pr-465/contributing/coding-guidelines/languages/docker/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/github-actions/index.html b/pr-465/contributing/coding-guidelines/languages/github-actions/index.html index f67c09d72f0..579fd28385d 100644 --- a/pr-465/contributing/coding-guidelines/languages/github-actions/index.html +++ b/pr-465/contributing/coding-guidelines/languages/github-actions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/markdown/index.html b/pr-465/contributing/coding-guidelines/languages/markdown/index.html index 3306c53ca2b..e0205ff84ab 100644 --- a/pr-465/contributing/coding-guidelines/languages/markdown/index.html +++ b/pr-465/contributing/coding-guidelines/languages/markdown/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/package-xml/index.html b/pr-465/contributing/coding-guidelines/languages/package-xml/index.html index 22d7e8ff14d..e27f7ea42af 100644 --- a/pr-465/contributing/coding-guidelines/languages/package-xml/index.html +++ b/pr-465/contributing/coding-guidelines/languages/package-xml/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/python/index.html b/pr-465/contributing/coding-guidelines/languages/python/index.html index 59873639289..d6cac037603 100644 --- a/pr-465/contributing/coding-guidelines/languages/python/index.html +++ b/pr-465/contributing/coding-guidelines/languages/python/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/languages/shell-scripts/index.html b/pr-465/contributing/coding-guidelines/languages/shell-scripts/index.html index bb284fe4a17..63240d5addc 100644 --- a/pr-465/contributing/coding-guidelines/languages/shell-scripts/index.html +++ b/pr-465/contributing/coding-guidelines/languages/shell-scripts/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/class-design/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/class-design/index.html index 85e8f124e4e..22af27a82b8 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/class-design/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/class-design/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/console-logging/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/console-logging/index.html index fc53eb99a3a..bda86c377c8 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/console-logging/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/console-logging/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/coordinate-system/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/coordinate-system/index.html index b39ad66169a..a103df62180 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/coordinate-system/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/coordinate-system/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/directory-structure/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/directory-structure/index.html index 09a7f3b911e..c487e844d77 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/directory-structure/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/directory-structure/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/launch-files/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/launch-files/index.html index 61d0f4df774..a04a0b783f2 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/launch-files/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/launch-files/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/message-guidelines/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/message-guidelines/index.html index 111b5a44348..0ef3c6227f6 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/message-guidelines/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/message-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/parameters/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/parameters/index.html index 10db829842a..fbf6f2e5d31 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/parameters/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/parameters/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/task-scheduling/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/task-scheduling/index.html index f33b3c60086..edd0ee325b8 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/task-scheduling/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/task-scheduling/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/coding-guidelines/ros-nodes/topic-namespaces/index.html b/pr-465/contributing/coding-guidelines/ros-nodes/topic-namespaces/index.html index dabf9a0e24f..c56241e4864 100644 --- a/pr-465/contributing/coding-guidelines/ros-nodes/topic-namespaces/index.html +++ b/pr-465/contributing/coding-guidelines/ros-nodes/topic-namespaces/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/discussion-guidelines/index.html b/pr-465/contributing/discussion-guidelines/index.html index 261976ff40d..3238afb50c5 100644 --- a/pr-465/contributing/discussion-guidelines/index.html +++ b/pr-465/contributing/discussion-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/documentation-guidelines/index.html b/pr-465/contributing/documentation-guidelines/index.html index 8d914e10de8..9085c4aa8f7 100644 --- a/pr-465/contributing/documentation-guidelines/index.html +++ b/pr-465/contributing/documentation-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/index.html b/pr-465/contributing/index.html index 1c2f20cc759..1b1f2f31d1a 100644 --- a/pr-465/contributing/index.html +++ b/pr-465/contributing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/license/index.html b/pr-465/contributing/license/index.html index 34156279311..9fa76032968 100644 --- a/pr-465/contributing/license/index.html +++ b/pr-465/contributing/license/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/pull-request-guidelines/ci-checks/index.html b/pr-465/contributing/pull-request-guidelines/ci-checks/index.html index 73540167bac..ee03942e887 100644 --- a/pr-465/contributing/pull-request-guidelines/ci-checks/index.html +++ b/pr-465/contributing/pull-request-guidelines/ci-checks/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/pull-request-guidelines/commit-guidelines/index.html b/pr-465/contributing/pull-request-guidelines/commit-guidelines/index.html index 43c485b2032..69b09154c2b 100644 --- a/pr-465/contributing/pull-request-guidelines/commit-guidelines/index.html +++ b/pr-465/contributing/pull-request-guidelines/commit-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/pull-request-guidelines/index.html b/pr-465/contributing/pull-request-guidelines/index.html index 47c0c36d975..d59d98af377 100644 --- a/pr-465/contributing/pull-request-guidelines/index.html +++ b/pr-465/contributing/pull-request-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/pull-request-guidelines/review-guidelines/index.html b/pr-465/contributing/pull-request-guidelines/review-guidelines/index.html index c89149d38ee..75ebe6a56a1 100644 --- a/pr-465/contributing/pull-request-guidelines/review-guidelines/index.html +++ b/pr-465/contributing/pull-request-guidelines/review-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/pull-request-guidelines/review-tips/index.html b/pr-465/contributing/pull-request-guidelines/review-tips/index.html index 55e92f3fdd8..9a0b364346a 100644 --- a/pr-465/contributing/pull-request-guidelines/review-tips/index.html +++ b/pr-465/contributing/pull-request-guidelines/review-tips/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/testing-guidelines/index.html b/pr-465/contributing/testing-guidelines/index.html index c3b49538072..852b6fdf7f7 100644 --- a/pr-465/contributing/testing-guidelines/index.html +++ b/pr-465/contributing/testing-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/testing-guidelines/integration-testing/index.html b/pr-465/contributing/testing-guidelines/integration-testing/index.html index 9339f9fa84b..f7bcb6fdd97 100644 --- a/pr-465/contributing/testing-guidelines/integration-testing/index.html +++ b/pr-465/contributing/testing-guidelines/integration-testing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/contributing/testing-guidelines/unit-testing/index.html b/pr-465/contributing/testing-guidelines/unit-testing/index.html index 5a32f44547f..e47fef8e5c4 100644 --- a/pr-465/contributing/testing-guidelines/unit-testing/index.html +++ b/pr-465/contributing/testing-guidelines/unit-testing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/datasets/index.html b/pr-465/datasets/index.html index 7d42662a4c4..66b33e1ccf1 100644 --- a/pr-465/datasets/index.html +++ b/pr-465/datasets/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/control/index.html b/pr-465/design/autoware-architecture/control/index.html index d1799039d6b..22eb0606a37 100644 --- a/pr-465/design/autoware-architecture/control/index.html +++ b/pr-465/design/autoware-architecture/control/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/index.html b/pr-465/design/autoware-architecture/index.html index d7f652f2125..3e345cb7570 100644 --- a/pr-465/design/autoware-architecture/index.html +++ b/pr-465/design/autoware-architecture/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/localization/index.html b/pr-465/design/autoware-architecture/localization/index.html index ae7b885f9c4..230666e285e 100644 --- a/pr-465/design/autoware-architecture/localization/index.html +++ b/pr-465/design/autoware-architecture/localization/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/map/index.html b/pr-465/design/autoware-architecture/map/index.html index 645167c103f..2f3e657975a 100644 --- a/pr-465/design/autoware-architecture/map/index.html +++ b/pr-465/design/autoware-architecture/map/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/node-diagram/index.html b/pr-465/design/autoware-architecture/node-diagram/index.html index 5bd82938815..bc11aec58d0 100644 --- a/pr-465/design/autoware-architecture/node-diagram/index.html +++ b/pr-465/design/autoware-architecture/node-diagram/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/perception/index.html b/pr-465/design/autoware-architecture/perception/index.html index 9ed73aab8e1..13f18f77d95 100644 --- a/pr-465/design/autoware-architecture/perception/index.html +++ b/pr-465/design/autoware-architecture/perception/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/planning/index.html b/pr-465/design/autoware-architecture/planning/index.html index 7561dabf0ea..2b7bceda96d 100644 --- a/pr-465/design/autoware-architecture/planning/index.html +++ b/pr-465/design/autoware-architecture/planning/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/data-types/gnss-ins-data/index.html b/pr-465/design/autoware-architecture/sensing/data-types/gnss-ins-data/index.html index 585352908ac..4dcc2f43b8a 100644 --- a/pr-465/design/autoware-architecture/sensing/data-types/gnss-ins-data/index.html +++ b/pr-465/design/autoware-architecture/sensing/data-types/gnss-ins-data/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/data-types/image/index.html b/pr-465/design/autoware-architecture/sensing/data-types/image/index.html index 036fabf51b0..caa1382ad15 100644 --- a/pr-465/design/autoware-architecture/sensing/data-types/image/index.html +++ b/pr-465/design/autoware-architecture/sensing/data-types/image/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/data-types/point-cloud/index.html b/pr-465/design/autoware-architecture/sensing/data-types/point-cloud/index.html index 17492e787d8..6c938e79bf3 100644 --- a/pr-465/design/autoware-architecture/sensing/data-types/point-cloud/index.html +++ b/pr-465/design/autoware-architecture/sensing/data-types/point-cloud/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/data-types/radar-data/index.html b/pr-465/design/autoware-architecture/sensing/data-types/radar-data/index.html index 8c76f5afd70..a91bfa94b67 100644 --- a/pr-465/design/autoware-architecture/sensing/data-types/radar-data/index.html +++ b/pr-465/design/autoware-architecture/sensing/data-types/radar-data/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/data-types/ultrasonics-data/index.html b/pr-465/design/autoware-architecture/sensing/data-types/ultrasonics-data/index.html index c69e9d1b335..ae8a719bed7 100644 --- a/pr-465/design/autoware-architecture/sensing/data-types/ultrasonics-data/index.html +++ b/pr-465/design/autoware-architecture/sensing/data-types/ultrasonics-data/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/sensing/index.html b/pr-465/design/autoware-architecture/sensing/index.html index 0c7eae1af18..2b41ea7c261 100644 --- a/pr-465/design/autoware-architecture/sensing/index.html +++ b/pr-465/design/autoware-architecture/sensing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-architecture/vehicle/index.html b/pr-465/design/autoware-architecture/vehicle/index.html index ef0af9f994b..0f0d2c7cd35 100644 --- a/pr-465/design/autoware-architecture/vehicle/index.html +++ b/pr-465/design/autoware-architecture/vehicle/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-concepts/difference-from-ai-and-auto/index.html b/pr-465/design/autoware-concepts/difference-from-ai-and-auto/index.html index 7db3cd5ddfd..c2da21cf682 100644 --- a/pr-465/design/autoware-concepts/difference-from-ai-and-auto/index.html +++ b/pr-465/design/autoware-concepts/difference-from-ai-and-auto/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-concepts/index.html b/pr-465/design/autoware-concepts/index.html index 45e4d47e746..6b54ee3ff7e 100644 --- a/pr-465/design/autoware-concepts/index.html +++ b/pr-465/design/autoware-concepts/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/cooperation/index.html b/pr-465/design/autoware-interfaces/ad-api/features/cooperation/index.html index 26b7a28352f..31f6ed2d5b3 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/cooperation/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/cooperation/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/fail-safe/index.html b/pr-465/design/autoware-interfaces/ad-api/features/fail-safe/index.html index 0b655bbf5b7..8d65a776c48 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/fail-safe/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/fail-safe/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/interface/index.html b/pr-465/design/autoware-interfaces/ad-api/features/interface/index.html index 9c3273131ae..34e86668a30 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/interface/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/interface/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/localization/index.html b/pr-465/design/autoware-interfaces/ad-api/features/localization/index.html index baea824f21e..e776bc3ffd7 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/localization/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/localization/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/motion/index.html b/pr-465/design/autoware-interfaces/ad-api/features/motion/index.html index b3ca2c2355a..c9b3be93ca8 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/motion/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/motion/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/operation_mode/index.html b/pr-465/design/autoware-interfaces/ad-api/features/operation_mode/index.html index ed5702ff7f6..c1ecf57ef9c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/operation_mode/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/operation_mode/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/perception/index.html b/pr-465/design/autoware-interfaces/ad-api/features/perception/index.html index fb2c6b4ba6d..1368c96f59b 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/perception/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/perception/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/planning-factors/index.html b/pr-465/design/autoware-interfaces/ad-api/features/planning-factors/index.html index d7e4e929877..da52ebe9ebe 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/planning-factors/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/planning-factors/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/routing/index.html b/pr-465/design/autoware-interfaces/ad-api/features/routing/index.html index 617dda06c29..b3648be684d 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/routing/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/routing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/vehicle-doors/index.html b/pr-465/design/autoware-interfaces/ad-api/features/vehicle-doors/index.html index 5d8dd24038d..2915b14ddcf 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/vehicle-doors/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/vehicle-doors/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/features/vehicle-status/index.html b/pr-465/design/autoware-interfaces/ad-api/features/vehicle-status/index.html index 49a0073a567..f50ed248870 100644 --- a/pr-465/design/autoware-interfaces/ad-api/features/vehicle-status/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/features/vehicle-status/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/index.html b/pr-465/design/autoware-interfaces/ad-api/index.html index 7f15208d834..eb4d877b6e7 100644 --- a/pr-465/design/autoware-interfaces/ad-api/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/index.html index 42e675b8b15..0714dd03ca4 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/interface/version/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/interface/version/index.html index bbebaed89a4..b6e85d34ff3 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/interface/version/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/interface/version/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialization_state/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialization_state/index.html index ab4352463be..ea0c6423572 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialization_state/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialization_state/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialize/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialize/index.html index 2e91ea632d7..9b86859e605 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialize/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/localization/initialize/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/motion/accept_start/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/motion/accept_start/index.html index 96568a68eb2..f6d2ff4aedd 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/motion/accept_start/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/motion/accept_start/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/motion/state/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/motion/state/index.html index 481245e8757..202e2e39769 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/motion/state/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/motion/state/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/index.html index 724ec38c40a..fae7c65073a 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/index.html index e9e3d5cfc52..58958e0f8ae 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/index.html index 7800b94bdd8..70336ffcd96 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/index.html index 32ebfa093bc..d037f43a188 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/index.html index ae731cf05ad..09c2c782741 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/index.html index 547c20d1fcc..9518fb704fe 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/state/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/state/index.html index 91a4465320e..045ab962a58 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/state/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/operation_mode/state/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/perception/objects/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/perception/objects/index.html index 560da54058a..fbd6729d235 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/perception/objects/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/perception/objects/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/index.html index 64d68453ce7..9833dd85fb3 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/index.html index fdc159b9744..e540496aeb0 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/index.html index 1c1e5b9d69f..7a76adea85c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/steering_factors/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/steering_factors/index.html index 9d73261548c..cd30dcf2dc4 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/steering_factors/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/steering_factors/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/index.html index bdecfb48452..42acbec4e77 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/clear_route/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/clear_route/index.html index 2d52a390c35..9b947a689b2 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/clear_route/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/clear_route/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/route/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/route/index.html index 1cbf81b85fc..a5fb14ddd4e 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/route/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/route/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route/index.html index 0c8f4aabf15..4e15de7b80c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route_points/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route_points/index.html index 0853c041c87..6407907d33d 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route_points/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/set_route_points/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/state/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/state/index.html index d787bb75abf..e5cfd5c54ca 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/routing/state/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/routing/state/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/index.html index c156a1e10ce..f3c9a3eb268 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/index.html index ba96eb22196..765a2c47820 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/index.html index f3bdbed4659..4d9b1523c9a 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/index.html index 455cad4394c..ae62dce243c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/index.html index 70a5bde53ca..7524252afcd 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/status/index.html b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/status/index.html index 7c726c68507..2c083279c06 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/status/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/api/vehicle/status/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/list/index.html b/pr-465/design/autoware-interfaces/ad-api/list/index.html index 38105923ec9..9ed175ffb36 100644 --- a/pr-465/design/autoware-interfaces/ad-api/list/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/list/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/stories/bus-service/index.html b/pr-465/design/autoware-interfaces/ad-api/stories/bus-service/index.html index d373c64503a..fee00591379 100644 --- a/pr-465/design/autoware-interfaces/ad-api/stories/bus-service/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/stories/bus-service/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/stories/taxi-service/index.html b/pr-465/design/autoware-interfaces/ad-api/stories/taxi-service/index.html index dd31cd15580..2e78d9412dc 100644 --- a/pr-465/design/autoware-interfaces/ad-api/stories/taxi-service/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/stories/taxi-service/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/index.html index 469ec638940..60a6e28c72e 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/index.html index d1db63aecd0..aad136fbc02 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/index.html index 1328367589d..120631d2264 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/index.html index d13289f3a75..daa900a59d4 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/index.html index 024c4c4c727..6a8d41fa5a5 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/index.html index 69dcdf008c5..c775a8838d2 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/index.html index 03f8f07feff..bd1a41f89ac 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/index.html index a88660b4161..01b9402b80c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/index.html index 0625ed19d27..6b6662e379f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/index.html index 6d2a4c113c9..35b899e41e1 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/index.html index 9f6c139ede3..76c45c05f99 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/index.html index d1bd3992a47..4908ebe0790 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/index.html index 0b1ee4a2fed..c7b98a3eb23 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/index.html index 8e9de22335a..9906d5feacc 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/index.html index 2f658961a4b..be654f0f0d7 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/index.html index 8160b9f6b1c..ce00da901f9 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/index.html index 273eff37d94..1ab7014efaa 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/index.html index 662f5658006..8268d3f9bd1 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/index.html index 977676753dd..3f144fb1c38 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/index.html index cec41e0e424..f5dd49e3355 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/index.html index 95568363abb..f234df14b59 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/index.html index abab2337ed9..19ac854b99f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/index.html index b88c1e1196d..2b1ca4c3f51 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/index.html index bed41fbd5bf..98916b398d1 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/index.html index f806b732873..3988302405f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/index.html index 0cedcd48840..79c69536e16 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/index.html index 798bfe1788d..fe89f4f75f5 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/index.html index e9d50ec0b62..a56a11de75a 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/index.html index 637de6e13f0..c170a300410 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/index.html index 5f1f053e7f8..bc1f3a84c7f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/index.html index a4d4bf0729b..444523a4d0f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/index.html index af493f16742..3da76a13e11 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/index.html index 2f035843370..a9aaa1c0fe6 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/index.html index 7b8418025c8..f35f42d827c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/index.html index c4b3caccbcf..376948a579f 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/index.html index 9ddc536d2e1..685f813fea3 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/index.html index 5cdba996d37..588ade1968a 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/index.html index f095ca36eea..85ccbef802e 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/index.html index 533fdd45c5e..bbaa8396c63 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/index.html index 8f3f81485ce..574df894306 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/index.html index 300b8ffb4dd..4e495c42320 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/index.html index 512cde886d1..55d3152983e 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/index.html index d62a3b8d5ad..683356973de 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/index.html index 4700b1cd41e..443b4f4cb83 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/index.html index 9e55d4d16a2..9919278bb79 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/index.html index b1fdfe57838..65d6273881e 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/index.html b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/index.html index 7489634597d..ca44055e608 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/types/index.html b/pr-465/design/autoware-interfaces/ad-api/types/index.html index 431e3f50644..0f4cbbe624c 100644 --- a/pr-465/design/autoware-interfaces/ad-api/types/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/types/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/change-operation-mode/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/change-operation-mode/index.html index bd64b112862..e59d578e445 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/change-operation-mode/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/change-operation-mode/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/drive-designated-position/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/drive-designated-position/index.html index df9b5087b23..487f45ff9ac 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/drive-designated-position/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/drive-designated-position/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/get-on-off/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/get-on-off/index.html index 800a416ea08..f567accb0fd 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/get-on-off/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/get-on-off/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/initialize-pose/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/initialize-pose/index.html index a0d221c4893..d4ae4cd9af8 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/initialize-pose/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/initialize-pose/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/launch-terminate/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/launch-terminate/index.html index 11996244c7a..5376b2cbf9b 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/launch-terminate/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/launch-terminate/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/index.html index 390bf50d164..1b41b845468 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-operation/index.html b/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-operation/index.html index bc8e7a083d8..babf863f108 100644 --- a/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-operation/index.html +++ b/pr-465/design/autoware-interfaces/ad-api/use-cases/vehicle-operation/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/control/index.html b/pr-465/design/autoware-interfaces/components/control/index.html index 5486250174e..eeda5e60a2b 100644 --- a/pr-465/design/autoware-interfaces/components/control/index.html +++ b/pr-465/design/autoware-interfaces/components/control/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/index.html b/pr-465/design/autoware-interfaces/components/index.html index 2535736460d..2a37b8ff64f 100644 --- a/pr-465/design/autoware-interfaces/components/index.html +++ b/pr-465/design/autoware-interfaces/components/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/localization/index.html b/pr-465/design/autoware-interfaces/components/localization/index.html index b364e6d1a3e..2248112a69d 100644 --- a/pr-465/design/autoware-interfaces/components/localization/index.html +++ b/pr-465/design/autoware-interfaces/components/localization/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/map/index.html b/pr-465/design/autoware-interfaces/components/map/index.html index f6af2352e68..8341b8e490e 100644 --- a/pr-465/design/autoware-interfaces/components/map/index.html +++ b/pr-465/design/autoware-interfaces/components/map/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/perception-interface/index.html b/pr-465/design/autoware-interfaces/components/perception-interface/index.html index bf478eb83e6..f3bef575018 100644 --- a/pr-465/design/autoware-interfaces/components/perception-interface/index.html +++ b/pr-465/design/autoware-interfaces/components/perception-interface/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/planning/index.html b/pr-465/design/autoware-interfaces/components/planning/index.html index 90493f0a158..bf9b66b612f 100644 --- a/pr-465/design/autoware-interfaces/components/planning/index.html +++ b/pr-465/design/autoware-interfaces/components/planning/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/sensing/index.html b/pr-465/design/autoware-interfaces/components/sensing/index.html index e051768b039..ce61f5f391e 100644 --- a/pr-465/design/autoware-interfaces/components/sensing/index.html +++ b/pr-465/design/autoware-interfaces/components/sensing/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/vehicle-dimensions/index.html b/pr-465/design/autoware-interfaces/components/vehicle-dimensions/index.html index 2c76d1488ca..f87ede1e9a2 100644 --- a/pr-465/design/autoware-interfaces/components/vehicle-dimensions/index.html +++ b/pr-465/design/autoware-interfaces/components/vehicle-dimensions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/components/vehicle-interface/index.html b/pr-465/design/autoware-interfaces/components/vehicle-interface/index.html index f9d62eb7385..679e29605b6 100644 --- a/pr-465/design/autoware-interfaces/components/vehicle-interface/index.html +++ b/pr-465/design/autoware-interfaces/components/vehicle-interface/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/autoware-interfaces/index.html b/pr-465/design/autoware-interfaces/index.html index 88440254c65..68883bc57b9 100644 --- a/pr-465/design/autoware-interfaces/index.html +++ b/pr-465/design/autoware-interfaces/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/configuration-management/development-process/index.html b/pr-465/design/configuration-management/development-process/index.html index f46502cbe97..950549b4f2e 100644 --- a/pr-465/design/configuration-management/development-process/index.html +++ b/pr-465/design/configuration-management/development-process/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/configuration-management/index.html b/pr-465/design/configuration-management/index.html index 488132ebeb5..bbf3d1e3b7d 100644 --- a/pr-465/design/configuration-management/index.html +++ b/pr-465/design/configuration-management/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/configuration-management/release-process/index.html b/pr-465/design/configuration-management/release-process/index.html index 0d7f8cdc4f3..92abcf53c41 100644 --- a/pr-465/design/configuration-management/release-process/index.html +++ b/pr-465/design/configuration-management/release-process/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/configuration-management/repository-structure/index.html b/pr-465/design/configuration-management/repository-structure/index.html index 533047834e4..fc1b2662409 100644 --- a/pr-465/design/configuration-management/repository-structure/index.html +++ b/pr-465/design/configuration-management/repository-structure/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/design/index.html b/pr-465/design/index.html index 253eb9a1f41..b2a5f76af04 100644 --- a/pr-465/design/index.html +++ b/pr-465/design/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/index.html b/pr-465/how-to-guides/index.html index 5385be12450..2a01d21aed2 100644 --- a/pr-465/how-to-guides/index.html +++ b/pr-465/how-to-guides/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/index.html b/pr-465/how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/index.html index 1a9b1c55931..6e40108bea3 100644 --- a/pr-465/how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/index.html +++ b/pr-465/how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/images/crosswalk-test.png b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/images/crosswalk-test.png new file mode 100644 index 00000000000..83065c11aa6 Binary files /dev/null and b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/images/crosswalk-test.png differ diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/index.html b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/index.html index e33d4405f82..97ebb88e291 100644 --- a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/index.html +++ b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + @@ -5487,6 +5535,14 @@

    Testing created cross
  • After that, please press Shift, then click right click button for inserting pedestrians.
  • You can control inserted pedestrian via dragging right click.
  • +

    Crosswalk markers on rviz:

    +
    +

    crosswalk-test +

    +
    + Crosswalk test on the created map. +
    +

    Video Demonstration:

    diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/index.html b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/index.html index 5896287fe7a..c3a2c66b01c 100644 --- a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/index.html +++ b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + @@ -5447,10 +5495,14 @@

    OverviewVector Map Builder#

    You need a TIER IV account for using vector map builder tool. So, if you have not before, please create a TIER IV account -in order to use vector map builder tool.

    +in order to use vector map builder tool. +For more information about this tool, please check the official guide.

    You can follow these pages for creating a Lanelet2 map and its regulatory elements.

    diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/index.html b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/index.html index 4a28a9d5880..6434e1cde70 100644 --- a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/index.html +++ b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/images/stop-line-test.png b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/images/stop-line-test.png new file mode 100644 index 00000000000..aa7a36fc919 Binary files /dev/null and b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/images/stop-line-test.png differ diff --git a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/index.html b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/index.html index a6e695802b3..cb74a4a6f8c 100644 --- a/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/index.html +++ b/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/index.html @@ -14,7 +14,7 @@ - + @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + @@ -5450,7 +5498,7 @@

    Creating a stop line regulatory element#

    In order to create a stop line on your pointcloud map, please follow these steps:

      -
    1. Please select lanelet to add stop line.
    2. +
    3. Please select lanelet which stop line to be added.
    4. Click Abstraction button on top panel.
    5. Select Stop Line from the panel.
    6. Click on the desired area for inserting stop line.
    7. @@ -5486,6 +5534,14 @@

      Testing c
    8. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    9. You can see the stop line marker on the rviz screen.
    +

    Stop line markers on rviz:

    +
    +

    stop-line-test +

    +
    + Stop line test on the created map. +
    +

    Video Demonstration:

    @@ -5531,13 +5587,13 @@

    Testing c - diff --git a/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/index.html b/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/index.html index 3c93bd7b234..5bf1fdc2cc1 100644 --- a/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/index.html +++ b/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/index.html b/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/index.html index e53cb1d5ca7..6f5d768e73a 100644 --- a/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/index.html +++ b/pr-465/how-to-guides/integrating-autoware/launch-autoware/localization-methods/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/launch-autoware/perception/index.html b/pr-465/how-to-guides/integrating-autoware/launch-autoware/perception/index.html index e07b0b4b1e8..e6debb1edd1 100644 --- a/pr-465/how-to-guides/integrating-autoware/launch-autoware/perception/index.html +++ b/pr-465/how-to-guides/integrating-autoware/launch-autoware/perception/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/overview/index.html b/pr-465/how-to-guides/integrating-autoware/overview/index.html index 1170debb877..b0d0cd29642 100644 --- a/pr-465/how-to-guides/integrating-autoware/overview/index.html +++ b/pr-465/how-to-guides/integrating-autoware/overview/index.html @@ -2278,6 +2278,8 @@ + + @@ -2438,6 +2440,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/index.html b/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/index.html index d4474ecaf6c..0295e69c8ab 100644 --- a/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/index.html +++ b/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/index.html b/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/index.html index 4124b683a7b..59635ffefdb 100644 --- a/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/index.html +++ b/pr-465/how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/add-a-custom-ros-message/index.html b/pr-465/how-to-guides/others/add-a-custom-ros-message/index.html index cbeac72ce04..67e7291ec5d 100644 --- a/pr-465/how-to-guides/others/add-a-custom-ros-message/index.html +++ b/pr-465/how-to-guides/others/add-a-custom-ros-message/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/advanced-usage-of-colcon/index.html b/pr-465/how-to-guides/others/advanced-usage-of-colcon/index.html index bc516c451a8..eee11483105 100644 --- a/pr-465/how-to-guides/others/advanced-usage-of-colcon/index.html +++ b/pr-465/how-to-guides/others/advanced-usage-of-colcon/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/index.html b/pr-465/how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/index.html index d5f835ef672..b7aabc662c3 100644 --- a/pr-465/how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/index.html +++ b/pr-465/how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/applying-clang-tidy-to-ros-packages/index.html b/pr-465/how-to-guides/others/applying-clang-tidy-to-ros-packages/index.html index 3b1d7428c58..007b03da7e7 100644 --- a/pr-465/how-to-guides/others/applying-clang-tidy-to-ros-packages/index.html +++ b/pr-465/how-to-guides/others/applying-clang-tidy-to-ros-packages/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/debug-autoware/index.html b/pr-465/how-to-guides/others/debug-autoware/index.html index b3c80fe2f1e..c65491d8e74 100644 --- a/pr-465/how-to-guides/others/debug-autoware/index.html +++ b/pr-465/how-to-guides/others/debug-autoware/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/defining-temporal-performance-metrics/index.html b/pr-465/how-to-guides/others/defining-temporal-performance-metrics/index.html index 56b811148e6..1a633d70508 100644 --- a/pr-465/how-to-guides/others/defining-temporal-performance-metrics/index.html +++ b/pr-465/how-to-guides/others/defining-temporal-performance-metrics/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/determining-component-dependencies/index.html b/pr-465/how-to-guides/others/determining-component-dependencies/index.html index 428dc68cae2..4ddd9a16d31 100644 --- a/pr-465/how-to-guides/others/determining-component-dependencies/index.html +++ b/pr-465/how-to-guides/others/determining-component-dependencies/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/fixing-dependent-package-versions/index.html b/pr-465/how-to-guides/others/fixing-dependent-package-versions/index.html index 1fed0d1404f..06852cd085c 100644 --- a/pr-465/how-to-guides/others/fixing-dependent-package-versions/index.html +++ b/pr-465/how-to-guides/others/fixing-dependent-package-versions/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/reducing-start-delays/index.html b/pr-465/how-to-guides/others/reducing-start-delays/index.html index 5b611c2f8d9..37ea758ab71 100644 --- a/pr-465/how-to-guides/others/reducing-start-delays/index.html +++ b/pr-465/how-to-guides/others/reducing-start-delays/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/others/running-autoware-without-cuda/index.html b/pr-465/how-to-guides/others/running-autoware-without-cuda/index.html index defa3acbb89..1fd29948117 100644 --- a/pr-465/how-to-guides/others/running-autoware-without-cuda/index.html +++ b/pr-465/how-to-guides/others/running-autoware-without-cuda/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/how-to-guides/training-machine-learning-models/training-models/index.html b/pr-465/how-to-guides/training-machine-learning-models/training-models/index.html index b6ffca0f5e7..1b5b7f96d70 100644 --- a/pr-465/how-to-guides/training-machine-learning-models/training-models/index.html +++ b/pr-465/how-to-guides/training-machine-learning-models/training-models/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/index.html b/pr-465/index.html index fb2e81b3bda..70e7d1b1445 100644 --- a/pr-465/index.html +++ b/pr-465/index.html @@ -2167,6 +2167,8 @@ + + @@ -2327,6 +2329,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/additional-settings-for-developers/index.html b/pr-465/installation/additional-settings-for-developers/index.html index f8e3379d1f7..3bb913dda2b 100644 --- a/pr-465/installation/additional-settings-for-developers/index.html +++ b/pr-465/installation/additional-settings-for-developers/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/autoware/docker-installation-devel/index.html b/pr-465/installation/autoware/docker-installation-devel/index.html index 09bc8b9edc8..a293321d567 100644 --- a/pr-465/installation/autoware/docker-installation-devel/index.html +++ b/pr-465/installation/autoware/docker-installation-devel/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/autoware/docker-installation-prebuilt/index.html b/pr-465/installation/autoware/docker-installation-prebuilt/index.html index 44c19bc4ac8..0b708738520 100644 --- a/pr-465/installation/autoware/docker-installation-prebuilt/index.html +++ b/pr-465/installation/autoware/docker-installation-prebuilt/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/autoware/docker-installation/index.html b/pr-465/installation/autoware/docker-installation/index.html index 80f1109f7fa..85783491222 100644 --- a/pr-465/installation/autoware/docker-installation/index.html +++ b/pr-465/installation/autoware/docker-installation/index.html @@ -2209,6 +2209,8 @@ + + @@ -2369,6 +2371,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/autoware/source-installation/index.html b/pr-465/installation/autoware/source-installation/index.html index 33089433460..6707ead524e 100644 --- a/pr-465/installation/autoware/source-installation/index.html +++ b/pr-465/installation/autoware/source-installation/index.html @@ -2176,6 +2176,8 @@ + + @@ -2336,6 +2338,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/index.html b/pr-465/installation/index.html index 39cf3395da9..95533e97257 100644 --- a/pr-465/installation/index.html +++ b/pr-465/installation/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/installation/related-tools/index.html b/pr-465/installation/related-tools/index.html index bc07dd258e4..a9785006e3e 100644 --- a/pr-465/installation/related-tools/index.html +++ b/pr-465/installation/related-tools/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/models/index.html b/pr-465/models/index.html index 38b53a1b874..e5baceffa02 100644 --- a/pr-465/models/index.html +++ b/pr-465/models/index.html @@ -2112,6 +2112,8 @@ + + @@ -2272,6 +2274,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/ad-computers/index.html b/pr-465/reference-hw/ad-computers/index.html index 8a363014b72..7d3f667cde4 100644 --- a/pr-465/reference-hw/ad-computers/index.html +++ b/pr-465/reference-hw/ad-computers/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/cameras/index.html b/pr-465/reference-hw/cameras/index.html index ff70ef64725..2361e26781c 100644 --- a/pr-465/reference-hw/cameras/index.html +++ b/pr-465/reference-hw/cameras/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/full_drivers_list/index.html b/pr-465/reference-hw/full_drivers_list/index.html index 7c5496ad5d0..27ef4030aed 100644 --- a/pr-465/reference-hw/full_drivers_list/index.html +++ b/pr-465/reference-hw/full_drivers_list/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/imu_ahrs_gnss_ins/index.html b/pr-465/reference-hw/imu_ahrs_gnss_ins/index.html index 59d62380df3..ed37376a8bf 100644 --- a/pr-465/reference-hw/imu_ahrs_gnss_ins/index.html +++ b/pr-465/reference-hw/imu_ahrs_gnss_ins/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/index.html b/pr-465/reference-hw/index.html index d696d33a914..1251bd1190b 100644 --- a/pr-465/reference-hw/index.html +++ b/pr-465/reference-hw/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/lidars/index.html b/pr-465/reference-hw/lidars/index.html index 7132d773649..02dcfb97e14 100644 --- a/pr-465/reference-hw/lidars/index.html +++ b/pr-465/reference-hw/lidars/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/radars/index.html b/pr-465/reference-hw/radars/index.html index 5ba97a76bae..292c92d4057 100644 --- a/pr-465/reference-hw/radars/index.html +++ b/pr-465/reference-hw/radars/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/remote_drive/index.html b/pr-465/reference-hw/remote_drive/index.html index d815891fa9a..24d5ae33869 100644 --- a/pr-465/reference-hw/remote_drive/index.html +++ b/pr-465/reference-hw/remote_drive/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/thermal_cameras/index.html b/pr-465/reference-hw/thermal_cameras/index.html index 3a7b8392924..e401fdee37b 100644 --- a/pr-465/reference-hw/thermal_cameras/index.html +++ b/pr-465/reference-hw/thermal_cameras/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/vehicle_drive_by_wire_suppliers/index.html b/pr-465/reference-hw/vehicle_drive_by_wire_suppliers/index.html index 5c3de0bf334..b5028bec686 100644 --- a/pr-465/reference-hw/vehicle_drive_by_wire_suppliers/index.html +++ b/pr-465/reference-hw/vehicle_drive_by_wire_suppliers/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/reference-hw/vehicle_platform_suppliers/index.html b/pr-465/reference-hw/vehicle_platform_suppliers/index.html index f8a80c8862e..a544fd29f15 100644 --- a/pr-465/reference-hw/vehicle_platform_suppliers/index.html +++ b/pr-465/reference-hw/vehicle_platform_suppliers/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/search/search_index.json b/pr-465/search/search_index.json index a03fedc7101..ed41b37b376 100644 --- a/pr-465/search/search_index.json +++ b/pr-465/search/search_index.json @@ -1 +1 @@ -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Introduction","text":""},{"location":"#autoware-documentation","title":"Autoware Documentation","text":""},{"location":"#about-autoware","title":"About Autoware","text":"

    Autoware is the world\u2019s leading open-source software project for autonomous driving. Autoware is built on Robot Operating System (ROS) and enables commercial deployment of autonomous driving in a broad range of vehicles and applications.

    Please see here for more details.

    "},{"location":"#related-documentations","title":"Related Documentations","text":"

    This Autoware Documentation is for Autoware's general information.

    For detailed documents of Autoware Universe components, see Autoware Universe Documentation.

    "},{"location":"#getting-started","title":"Getting started","text":""},{"location":"autoware-competitions/","title":"Autoware Competitions","text":""},{"location":"autoware-competitions/#autoware-competitions","title":"Autoware Competitions","text":"

    This page is a collection of the links to the competitions that are related to the Autoware Foundation.

    Title Status Description Ongoing Autoware / TIER IV Challenge 2023 Date: May 15, 2023 - Nov. 1st, 2023 As one of the main contributors of Autoware, TIER IV has been facing many difficult challenges through development, and TIER IV would like to sponsor a challenge to solve such engineering challenges. Any researchers, students, individuals or organizations are welcome to participate and submit their solution to any of the challenges we propose. Ongoing Japan Automotive AI Challenge 2023 Registration: June 5, 2023 - July 14, 2023 Qualifiers: July 3, 2023 - Aug. 31, 2023 Finals: Nov. 12, 2023 In this competition, we focus on challenging tasks posed by autonomous driving in factory environments and aim to develop Autoware-based AD software that can overcome them. The qualifiers use the digital twin autonomous driving simulator AWSIM to complete specific tasks within a virtual environment. Teams that make it to the finals have the opportunity to run their software on actual vehicles in a test course in Japan."},{"location":"autoware-competitions/#proposing-new-competition","title":"Proposing New Competition","text":"

    If you want add a new competition to this page, please propose it in a TSC meeting and get confirmation from the AWF.

    "},{"location":"contributing/","title":"Contributing","text":""},{"location":"contributing/#contributing","title":"Contributing","text":"

    Thank you for your interest in contributing! Autoware is supported by people like you, and all types and sizes of contribution are welcome.

    As a contributor, here are the guidelines that we would like you to follow for Autoware and its associated repositories.

    Like Autoware itself, these guidelines are being actively developed and suggestions for improvement are always welcome! Guideline changes can be proposed by creating a discussion in the Ideas category.

    "},{"location":"contributing/#code-of-conduct","title":"Code of Conduct","text":"

    To ensure the Autoware community stays open and inclusive, please follow the Code of Conduct.

    If you believe that someone in the community has violated the Code of Conduct, please make a report by emailing conduct@autoware.org.

    "},{"location":"contributing/#what-should-i-know-before-i-get-started","title":"What should I know before I get started?","text":""},{"location":"contributing/#autoware-concepts","title":"Autoware concepts","text":"

    To gain a high-level understanding of Autoware's architecture and design, the following pages provide a brief overview:

    For experienced developers, the Autoware interfaces and individual component pages should also be reviewed to understand the inputs and outputs for each component or module at a more detailed level.

    "},{"location":"contributing/#contributing-to-open-source-projects","title":"Contributing to open source projects","text":"

    If you are new to open source projects, we recommend reading GitHub's How to Contribute to Open Source guide for an overview of why people contribute to open source projects, what it means to contribute and much more besides.

    "},{"location":"contributing/#how-can-i-get-help","title":"How can I get help?","text":"

    Do not open issues for general support questions as we want to keep GitHub issues for confirmed bug reports. Instead, open a discussion in the Q&A category. For more details on the support mechanisms for Autoware, refer to the Support guidelines.

    Note

    Issues created for questions or unconfirmed bugs will be moved to GitHub discussions by the maintainers.

    "},{"location":"contributing/#how-can-i-contribute","title":"How can I contribute?","text":""},{"location":"contributing/#discussions","title":"Discussions","text":"

    You can contribute to Autoware by facilitating and participating in discussions, such as:

    "},{"location":"contributing/#working-groups","title":"Working groups","text":"

    The various working groups within the Autoware Foundation are responsible for accomplishing goals set by the Technical Steering Committee. These working groups are open to everyone, and joining a particular working group will allow you to gain an understanding of current projects, see how those projects are managed within each group and to contribute to issues that will help progress a particular project.

    To see the schedule for upcoming working group meetings, refer to the Autoware Foundation events calendar.

    "},{"location":"contributing/#bug-reports","title":"Bug reports","text":"

    Before you report a bug, please search the issue tracker for the appropriate repository. It is possible that someone has already reported the same issue and that workarounds exist. If you can't determine the appropriate repository, ask the maintainers for help by creating a new discussion in the Q&A category.

    When reporting a bug, you should provide a minimal set of instructions to reproduce the issue. Doing so allows us to quickly confirm and focus on the right problem.

    If you want to fix the bug by yourself that will be appreciated, but you should discuss possible approaches with the maintainers in the issue before submitting a pull request.

    Creating an issue is straightforward, but if you happen to experience any problems then create a Q&A discussion to ask for help.

    "},{"location":"contributing/#pull-requests","title":"Pull requests","text":"

    You can submit pull requests for small changes such as:

    If your pull request is a large change, the following process should be followed:

    1. Create a GitHub Discussion to propose the change. Doing so allows you to get feedback from other members and the Autoware maintainers and to ensure that the proposed change is in line with Autoware's design philosophy and current development plans. If you're not sure where to have that conversation, then create a new Q&A discussion.

    2. Create an issue following consensus in the discussions

    3. Create a pull request to implement the changes that references the Issue created in step 2

    4. Create documentation for the new addition (if relevant)

    Examples of large changes include:

    For more information on how to submit a good pull request, have a read of the pull request guidelines and don't forget to review the required license notations!

    "},{"location":"contributing/license/","title":"License","text":""},{"location":"contributing/license/#license","title":"License","text":"

    Autoware is licensed under Apache License 2.0. Thus all contributions will be licensed as such as per clause 5 of the Apache License 2.0:

    5. Submission of Contributions. Unless You explicitly state otherwise,\n   any Contribution intentionally submitted for inclusion in the Work\n   by You to the Licensor shall be under the terms and conditions of\n   this License, without any additional terms or conditions.\n   Notwithstanding the above, nothing herein shall supersede or modify\n   the terms of any separate license agreement you may have executed\n   with Licensor regarding such Contributions.\n

    Here is an example copyright header to add to the top of a new file:

    Copyright [first year of contribution] The Autoware Contributors\nSPDX-License-Identifier: Apache-2.0\n

    We don't write copyright notations of each contributor here. Instead, we place them in the NOTICE file like the following.

    This product includes code developed by [company name].\nCopyright [first year of contribution] [company name]\n

    Let us know if your legal department has a special request for the copyright notation.

    Currently, the old formats explained here are also acceptable. Those old formats can be replaced by this new format if the original authors agree. Note that we won't write their copyrights to the NOTICE file unless they agree with the new format.

    References:

    "},{"location":"contributing/coding-guidelines/","title":"Coding guidelines","text":""},{"location":"contributing/coding-guidelines/#coding-guidelines","title":"Coding guidelines","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/#common-guidelines","title":"Common guidelines","text":"

    Refer to the following links for now:

    Also, keep in mind the following concepts.

    "},{"location":"contributing/coding-guidelines/languages/cmake/","title":"CMake","text":""},{"location":"contributing/coding-guidelines/languages/cmake/#cmake","title":"CMake","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/cmake/#use-the-autoware_package-macro","title":"Use the autoware_package macro","text":"

    To reduce duplications in CMakeLists.txt, there is the autoware_package() macro. See the README and use it in your package.

    "},{"location":"contributing/coding-guidelines/languages/cpp/","title":"C++","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#c","title":"C++","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/languages/cpp/#references","title":"References","text":"

    Follow the guidelines below if a rule is not defined on this page.

    1. https://docs.ros.org/en/humble/Contributing/Code-Style-Language-Versions.html
    2. https://www.autosar.org/fileadmin/standards/adaptive/22-11/AUTOSAR_RS_CPP14Guidelines.pdf
    3. https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines

    Also, it is encouraged to apply Clang-Tidy to each file. For the usage, see Applying Clang-Tidy to ROS packages.

    Note that not all rules are covered by Clang-Tidy.

    "},{"location":"contributing/coding-guidelines/languages/cpp/#style-rules","title":"Style rules","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#include-header-files-in-the-defined-order-required-partially-automated","title":"Include header files in the defined order (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example","title":"Example","text":"

    Include the headers in the following order:

    // Compliant\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    If you use \"\" and <> properly, ClangFormat in pre-commit sorts headers automatically.

    Do not define macros between #include lines because it prevents automatic sorting.

    // Non-compliant\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#define EIGEN_MPL2_ONLY\n#include \"my_header.hpp\"\n#include \"my_package/foo.hpp\"\n\n#include <Eigen/Core>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    Instead, define macros before #include lines.

    // Compliant\n#define EIGEN_MPL2_ONLY\n\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <Eigen/Core>\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    If there are any reasons for defining macros at a specific position, write a comment before the macro.

    // Compliant\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n\n// For the foo bar reason, the FOO_MACRO must be defined here.\n#define FOO_MACRO\n#include <foo/bar.hpp>\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-lower-snake-case-for-function-names-required-partially-automated","title":"Use lower snake case for function names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_1","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_1","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_1","title":"Example","text":"
    void function_name()\n{\n}\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-upper-camel-case-for-enum-names-required-partially-automated","title":"Use upper camel case for enum names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_2","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception_1","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_2","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_2","title":"Example","text":"
    enum class Color\n{\nRed, Green, Blue\n}\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-lower-snake-case-for-constant-names-required-partially-automated","title":"Use lower snake case for constant names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_3","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception_2","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_3","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_3","title":"Example","text":"
    constexpr double gravity = 9.80665;\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#count-acronyms-and-contractions-of-compound-words-as-one-word-required-partially-automated","title":"Count acronyms and contractions of compound words as one word (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_4","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_4","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_4","title":"Example","text":"
    class RosApi;\nRosApi ros_api;\n
    "},{"location":"contributing/coding-guidelines/languages/docker/","title":"Docker","text":""},{"location":"contributing/coding-guidelines/languages/docker/#docker","title":"Docker","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/github-actions/","title":"GitHub Actions","text":""},{"location":"contributing/coding-guidelines/languages/github-actions/#github-actions","title":"GitHub Actions","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/markdown/","title":"Markdown","text":""},{"location":"contributing/coding-guidelines/languages/markdown/#markdown","title":"Markdown","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/package-xml/","title":"package.xml","text":""},{"location":"contributing/coding-guidelines/languages/package-xml/#packagexml","title":"package.xml","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/python/","title":"Python","text":""},{"location":"contributing/coding-guidelines/languages/python/#python","title":"Python","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/shell-scripts/","title":"Shell scripts","text":""},{"location":"contributing/coding-guidelines/languages/shell-scripts/#shell-scripts","title":"Shell scripts","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/ros-nodes/class-design/","title":"Class design","text":""},{"location":"contributing/coding-guidelines/ros-nodes/class-design/#class-design","title":"Class design","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/","title":"Console logging","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#console-logging","title":"Console logging","text":"

    ROS 2 logging is a powerful tool for understanding and debugging ROS nodes.

    This page focuses on how to design console logging in Autoware and shows several practical examples. To comprehensively understand how ROS 2 logging works, refer to the logging documentation.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#logging-use-cases-in-autoware","title":"Logging use cases in Autoware","text":"

    To efficiently support these use cases, clean and highly visible logs are required. For that, several rules are defined below.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rules","title":"Rules","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#choose-appropriate-severity-levels-required-non-automated","title":"Choose appropriate severity levels (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale","title":"Rationale","text":"

    It's confusing if severity levels are inappropriate as follows:

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example","title":"Example","text":"

    Use the following criteria as a reference:

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#filter-out-unnecessary-logs-by-setting-logging-options-required-non-automated","title":"Filter out unnecessary logs by setting logging options (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_1","title":"Rationale","text":"

    Some third-party nodes such as drivers may not follow the Autoware's guidelines. If the logs are noisy, unnecessary logs should be filtered out.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_1","title":"Example","text":"

    Use the --log-level {level} option to change the minimum level of logs to be displayed:

    <launch>\n<!-- This outputs only FATAL level logs. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--log-level fatal\" />\n</launch>\n

    If you want to disable only specific output targets, use the --disable-stdout-logs, --disable-rosout-logs, and/or --disable-external-lib-logs options:

    <launch>\n<!-- This outputs to rosout and disk. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--disable-stdout-logs\" />\n</launch>\n
    <launch>\n<!-- This outputs to stdout. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--disable-rosout-logs --disable-external-lib-logs\" />\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#use-throttled-logging-when-the-log-is-unnecessarily-shown-repeatedly-required-non-automated","title":"Use throttled logging when the log is unnecessarily shown repeatedly (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_2","title":"Rationale","text":"

    If tons of logs are shown on the console, people miss important message.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_2","title":"Example","text":"

    While waiting for some messages, throttled logs are usually enough. In such cases, wait about 5 seconds as a reference value.

    // Compliant\nvoid FooNode::on_timer() {\nif (!current_pose_) {\nRCLCPP_ERROR_THROTTLE(get_logger(), *get_clock(), 5000, \"Waiting for current_pose_.\");\nreturn;\n}\n}\n\n// Non-compliant\nvoid FooNode::on_timer() {\nif (!current_pose_) {\nRCLCPP_ERROR(get_logger(), \"Waiting for current_pose_.\");\nreturn;\n}\n}\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#exception","title":"Exception","text":"

    The following cases are acceptable even if it's not throttled.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#do-not-depend-on-rclcppnode-in-core-library-classes-but-depend-only-on-rclcpplogginghpp-advisory-non-automated","title":"Do not depend on rclcpp::Node in core library classes but depend only on rclcpp/logging.hpp (advisory, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_3","title":"Rationale","text":"

    Core library classes, which contain reusable algorithms, may also be used for non-ROS platforms. When porting libraries to other platforms, fewer dependencies are preferred.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_3","title":"Example","text":"
    // Compliant\n#include <rclcpp/logging.hpp>\n\nclass FooCore {\npublic:\nexplicit FooCore(const rclcpp::Logger & logger) : logger_(logger) {}\n\nvoid process() {\nRCLCPP_INFO(logger_, \"message\");\n}\n\nprivate:\nrclcpp::Logger logger_;\n};\n\n// Compliant\n// Note that logs aren't published to `/rosout` if the logger name is different from the node name.\n#include <rclcpp/logging.hpp>\n\nclass FooCore {\nvoid process() {\nRCLCPP_INFO(rclcpp::get_logger(\"foo_core_logger\"), \"message\");\n}\n};\n\n\n// Non-compliant\n#include <rclcpp/node.hpp>\n\nclass FooCore {\npublic:\nexplicit FooCore(const rclcpp::NodeOptions & node_options) : node_(\"foo_core_node\", node_options) {}\n\nvoid process() {\nRCLCPP_INFO(node_.get_logger(), \"message\");\n}\n\nprivate:\nrclcpp::Node node_;\n};\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#tips","title":"Tips","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#use-rqt_console-to-filter-logs","title":"Use rqt_console to filter logs","text":"

    To filter logs, using rqt_console is useful:

    ros2 run rqt_console rqt_console\n

    For more details, refer to ROS 2 Documentation.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#useful-marco-expressions","title":"Useful marco expressions","text":"

    To debug a program, sometimes you need to see which functions and lines of code are executed. In that case, you can use the __FILE__, __LINE__ and __FUNCTION__ macros:

    void FooNode::on_timer() {\nRCLCPP_DEBUG(get_logger(), \"file: %s, line: %d, function: %s\", __FILE__, __LINE__, __FUNCTION__);\n}\n

    The example output is as follows:

    [DEBUG] [1671720414.395456931] [foo]: file: /path/to/file.cpp, line: 100, function: on_timer

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/","title":"Coordinate system","text":""},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#coordinate-system","title":"Coordinate system","text":""},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#overview","title":"Overview","text":"

    The commonly used coordinate systems include the world coordinate system, the vehicle coordinate system, and the sensor coordinate system.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#how-coordinates-are-used-in-autoware","title":"How coordinates are used in Autoware","text":"

    In Autoware, coordinate systems are typically used to represent the position and movement of vehicles and obstacles in space. Coordinate systems are commonly used for path planning, perception and control, and can help the vehicle decide how to avoid obstacles and plan a safe and efficient path of travel.

    1. Transformation of sensor data

      In Autoware, each sensor has a unique coordinate system and their data is expressed in terms of the coordinates. In order to correlate the independent data between different sensors, we need to find the position relationship between each sensor and the vehicle body. Once the installation position of the sensor on the vehicle body is determined, it will remain fixed during running, so the offline calibration method can be used to determine the precise position of each sensor relative to the vehicle body.

    2. ROS TF2

      The TF2 system maintains a tree of coordinate transformations to represent the relationships between different coordinate systems. Each coordinate system is given a unique name and they are connected by coordinate transformations. For details on how to use TF2, refer to the TF2 tutorial.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#tf-tree","title":"TF tree","text":"

    In Autoware, a common coordinate system structure is shown below:

    graph TD\n    /earth --> /map\n    /map --> /base_link\n    /base_link --> /imu\n    /base_link --> /lidar\n    /base_link --> /gnss\n    /base_link --> /radar\n    /base_link --> /camera_link\n    /camera_link --> /camera_optical_link
    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#estimating-the-base_link-frame-by-using-the-other-sensors","title":"Estimating the base_link frame by using the other sensors","text":"

    Generally we don't have the localization sensors physically at the base_link frame. So various sensors localize with respect to their own frames, let's call it sensor frame.

    We introduce a new frame naming convention: x_by_y:

    x: estimated frame name\ny: localization method/source\n

    We cannot directly get the sensor frame. Because we would need the EKF module to estimate the base_link frame first.

    Without the EKF module the best we can do is to estimate Map[map] --> sensor_by_sensor --> base_link_by_sensor using this sensor.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#example-by-the-gnssins-sensor","title":"Example by the GNSS/INS sensor","text":"

    For the integrated GNSS/INS we use the following frames:

    flowchart LR\n    earth --> Map[map] --> gnss_ins_by_gnss_ins --> base_link_by_gnss_ins

    The gnss_ins_by_gnss_ins frame is obtained by the coordinates from GNSS/INS sensor. The coordinates are converted to map frame using the gnss_poser node.

    Finally gnss_ins_by_gnss_ins frame represents the position of the gnss_ins estimated by the gnss_ins sensor in the map.

    Then by using the static transformation between gnss_ins and the base_link frame, we can obtain the base_link_by_gnss_ins frame, which represents the base_link frame estimated by the gnss_ins sensor.

    References:

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#coordinate-axes-conventions","title":"Coordinate Axes Conventions","text":"

    We are using East, North, Up (ENU) coordinate axes convention by default throughout the stack.

    X+: East\nY+: North\nZ+: Up\n

    The position, orientation, velocity, acceleration are all defined in the same axis convention.

    Position by the GNSS/INS sensor is expected to be in earth frame.

    Orientation, velocity, acceleration by the GNSS/INS sensor are expected to be in the sensor frame. Axes parallel to the map frame.

    If roll, pitch, yaw is provided, they correspond to rotation around X, Y, Z axes respectively.

    Rotation around:\nX+: roll\nY+: pitch\nZ+: yaw\n

    References:

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#how-they-can-be-created","title":"How they can be created","text":"
    1. Calibration of sensor

      The conversion relationship between every sensor coordinate system and base_link can be obtained through sensor calibration technology. For instructions on how to calibrate your sensors, refer to this link: calibrating your sensors.

    2. Localization

      The relationship between the base_link coordinate system and the map coordinate system is determined by the position and orientation of the vehicle, and can be obtained from the vehicle localization result.

    3. Geo-referencing of map data

      The geo-referencing information provides the transformation from the earth coordinate system to the local map coordinate system.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/","title":"Directory structure","text":""},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#directory-structure","title":"Directory structure","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#c-package","title":"C++ package","text":"
    <package_name>\n\u251c\u2500 config\n\u2502   \u251c\u2500 foo_ros.param.yaml\n\u2502   \u2514\u2500 foo_non_ros.yaml\n\u251c\u2500 doc\n\u2502   \u251c\u2500 foo_document.md\n\u2502   \u2514\u2500 foo_diagram.svg\n\u251c\u2500 include\n\u2502   \u2514\u2500 <package_name>\n\u2502       \u2514\u2500 foo_public.hpp\n\u251c\u2500 launch\n\u2502   \u251c\u2500 foo.launch.xml\n\u2502   \u2514\u2500 foo.launch.py\n\u251c\u2500 schema\n\u2502   \u2514\u2500 foo_node.schema.json\n\u251c\u2500 src\n\u2502   \u251c\u2500 foo_node.cpp\n\u2502   \u251c\u2500 foo_node.hpp\n\u2502   \u2514\u2500 foo_private.hpp\n\u251c\u2500 test\n\u2502   \u2514\u2500 test_foo.cpp\n\u251c\u2500 package.xml\n\u251c\u2500 CMakeLists.txt\n\u2514\u2500 README.md\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#directory-descriptions","title":"Directory descriptions","text":""},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#config","title":"config","text":"

    Place configuration files such as node parameters. For ROS parameters, use the extension .param.yaml. For non-ROS parameters, use the extension .yaml.

    Rationale: Since ROS parameters files are type-sensitive, they should not be the target of some code formatters and linters. In order to distinguish the file type, we use different file extensions.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#doc","title":"doc","text":"

    Place document files and link from README.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#include","title":"include","text":"

    Place header files exposed to other packages. Do not place files directly under the include directory, but place files under the directory with the package name. This directory is used for mostly library headers. Note that many headers do not need to be placed here. It is enough to place the headers under the src directory.

    Reference: https://docs.ros.org/en/rolling/How-To-Guides/Ament-CMake-Documentation.html#adding-files-and-headers

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#launch","title":"launch","text":"

    Place launch files (.launch.xml and .launch.py).

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#schema","title":"schema","text":"

    Place parameter definition files. See parameters for details.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#src","title":"src","text":"

    Place source files and private header files.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#test","title":"test","text":"

    Place source files for testing. See unit testing for details.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#python-package","title":"Python package","text":"

    T.B.D.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/","title":"Launch files","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#launch-files","title":"Launch files","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#overview","title":"Overview","text":"

    Autoware uses the ROS 2 launch system to start up the software. Please see the official documentation to get a basic understanding of the ROS 2 launch system if you are not familiar with it.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#guideline","title":"Guideline","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#the-organization-of-launch-files-in-autoware","title":"The organization of launch files in Autoware","text":"

    Autoware mainly has two repositories related to launch file organization: the autoware.universe and the autoware_launch.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#autowareuniverse","title":"autoware.universe","text":"

    The autoware.universe repository contains the code of the main Autoware modules, and its launch directory is responsible for launching the nodes of each module. The Autoware software stack is organized based on the architecture, so you may find that the launch structure is arranged to match the architecture (splitting of files, namespaces). For example, the tier4_map_launch subdirectory corresponds to the map module, as do the other tier4_*_launch subdirectories.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#autoware_launch","title":"autoware_launch","text":"

    The autoware_launch is a repository referring to autoware.universe. The main purpose of introducing this repository is to provide a general entry point for starting the Autoware software stack, i.e., calling the launch file of each module.

    graph LR\nA11[logging_simulator.launch.xml]-.->A10[autoware.launch.xml]\nA12[planning_simulator.launch.xml]-.->A10[autoware.launch.xml]\nA13[e2e_simulator.launch.xml]-.->A10[autoware.launch.xml]\n\nA10-->A21[tier4_map_component.launch.xml]\nA10-->A22[xxx.launch.py]\nA10-->A23[tier4_localization_component.launch.xml]\nA10-->A24[xxx.launch.xml]\nA10-->A25[tier4_sensing_component.launch.xml]\n\nA23-->A30[localization.launch.xml]\nA30-->A31[pose_estimator.launch.xml]\nA30-->A32[util.launch.xml]\nA30-->A33[pose_twist_fusion_filter.launch.xml]\nA30-->A34[xxx.launch.xml]\nA30-->A35[twist_estimator.launch.xml]\n\nA33-->A41[stop_filter.launch.xml]\nA33-->A42[ekf_localizer.launch.xml]\nA33-->A43[twist2accel.launch.xml]
    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#add-a-new-package-in-autoware","title":"Add a new package in Autoware","text":"

    If a newly created package has an executable node, we expect a sample launch file and configuration within the package, just like the recommended structure shown in the previous directory structure page.

    In order to automatically load the newly added package when starting Autoware, you need to make some necessary changes to the corresponding launch file. For example, if using ICP instead of NDT as the pointcloud registration algorithm, you can modify the autoware.universe/launch/tier4_localization_launch/launch/pose_estimator/pose_estimator.launch.xml file to load the newly added ICP package.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#parameter-management","title":"Parameter management","text":"

    Another purpose of introducing the autoware_launch repository is to facilitate the parameter management of Autoware. Consider this situation: if we want to integrate Autoware into a specific vehicle and modify parameters, we have to fork autoware.universe, which also has a lot of code other than parameters and is frequently updated by developers. By integrating these parameters in autoware_launch, we can customize the Autoware parameters just by forking the autoware_launch repository. Taking the localization module as an example:

    1. all the \u201claunch parameters\u201d for the localization component are listed in the files under autoware_launch/autoware_launch/config/localization.
    2. the \"launch parameters\" file paths are set in the autoware_launch/autoware_launch/launch/components/tier4_localization_component.launch.xml file.
    3. in autoware.universe/launch/tier4_localization_launch/launch, the launch files load the \u201claunch parameters\u201d if the argument is given in the parameter configuration file. You can still use the default parameters in each package to launch tier4_localization_launch within autoware.universe.
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/","title":"Message guidelines","text":""},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#message-guidelines","title":"Message guidelines","text":""},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#format","title":"Format","text":"

    All messages should follow ROS message description specification.

    The accepted formats are:

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#naming","title":"Naming","text":"

    Under Construction

    Use Array as a suffix when creating a plural type of a message. This suffix is commonly used in common_interfaces.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#default-units","title":"Default units","text":"

    All the fields by default have the following units depending on their types:

    type default unit distance meter (m) angle radians (rad) time second (s) speed m/s velocity m/s acceleration m/s\u00b2 angular vel. rad/s angular accel. rad/s\u00b2

    If a field in a message has any of these default units, don't add any suffix or prefix denoting the type.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#non-default-units","title":"Non-default units","text":"

    For non-default units, use following suffixes:

    type non-default unit suffix distance nanometer _nm distance micrometer _um distance millimeter _mm distance kilometer _km angle degree (deg) _deg time nanosecond _ns time microsecond _us time millisecond _ms time minute _min time hour (h) _hour velocity km/h _kmph

    If a unit that you'd like to use doesn't exist here, create an issue/PR to add it to this list.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#message-field-types","title":"Message field types","text":"

    For list of types supported by the ROS interfaces see here.

    Also copied here for convenience:

    Message Field Type C++ equivalent bool bool byte uint8_t char char float32 float float64 double int8 int8_t uint8 uint8_t int16 int16_t uint16 uint16_t int32 int32_t uint32 uint32_t int64 int64_t uint64 uint64_t string std::string wstring std::u16string"},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#arrays","title":"Arrays","text":"

    For arrays, use unbounded dynamic array type.

    Example:

    int32[] unbounded_integer_array\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#enumerations","title":"Enumerations","text":"

    ROS 2 interfaces don't support enumerations directly.

    It is possible to define integers constants and assign them to a non-constant integer parameter.

    Constants are written in CONSTANT_CASE.

    Assign a different value to each element of a constant.

    Example from shape_msgs/msg/SolidPrimitive.msg

    uint8 BOX=1\nuint8 SPHERE=2\nuint8 CYLINDER=3\nuint8 CONE=4\nuint8 PRISM=5\n\n# The type of the shape\nuint8 type\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#comments","title":"Comments","text":"

    On top of the message, briefly explain what the message contains and/or what it is used for. For an example, see sensor_msgs/msg/Imu.msg.

    If necessary, add line comments before the fields that explain the context and/or meaning.

    For simple fields like x, y, z, w you might not need to add comments.

    Even though it is not strictly checked, try not to pass 100 characters in a line.

    Example:

    # Number of times the vehicle performed an emergency brake\nuint32 count_emergency_brake\n\n# Seconds passed since the last emergency brake\nuint64 duration\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#example-usages","title":"Example usages","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/","title":"Parameters","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#parameters","title":"Parameters","text":"

    Autoware ROS nodes have declared parameters whose values are provided during node startup in the form of a parameter file. All the expected parameters with corresponding values should exist in the parameter file. Depending on the application, the parameter values might need to be modified.

    Find more information on parameters from the official ROS documentation:

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#workflow","title":"Workflow","text":"

    A ROS package which uses the declare_parameter(...) function should:

    The rationale behind this workflow is to have a verified single source of truth to pass to the ROS node and to be used in the web documentation. The approach reduces the risk of using invalid parameter values and makes maintenance of documentation easier. This is achieved by:

    Note: a parameter value can still be modified and bypass the validation, as there is no validation during runtime.

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#declare-parameter-function","title":"Declare Parameter Function","text":"

    It is the declare_parameter(...) function which sets the parameter values during a node startup.

    declare_parameter<INSERT_TYPE>(\"INSERT_PARAMETER_1_NAME\");\ndeclare_parameter<INSERT_TYPE>(\"INSERT_PARAMETER_N_NAME\");\n

    As there is no default_value provided, the function throws an exception if a parameter were to be missing in the provided *.param.yaml file. Use a type from the C++ Type column in the table below for the declare_parameter(...) function, replacing INSERT_TYPE.

    ParameterType Enum C++ Type PARAMETER_BOOL bool PARAMETER_INTEGER int64_t PARAMETER_DOUBLE double PARAMETER_STRING std::string PARAMETER_BYTE_ARRAY std::vector<uint8_t> PARAMETER_BOOL_ARRAY std::vector<bool> PARAMETER_INTEGER_ARRAY std::vector<int64_t> PARAMETER_DOUBLE_ARRAY std::vector<double> PARAMETER_STRING_ARRAY std::vector<std::string>

    The table has been derived from Parameter Type and Parameter Value.

    See example: Lidar Apollo Segmentation TVM Nodes declare function

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#parameter-file","title":"Parameter File","text":"

    The parameter file is minimal as there is no need to provide the user with additional information, e.g., description or type. This is because the associated schema file provides the additional information. Use the template below as a starting point for a ROS node.

    /**:\nros__parameters:\nINSERT_PARAMETER_1_NAME: INSERT_PARAMETER_1_VALUE\nINSERT_PARAMETER_N_NAME: INSERT_PARAMETER_N_VALUE\n

    Note: /** is used instead of the explicit node namespace, this allows the parameter file to be passed to a ROS node which has been remapped.

    To adapt the template to the ROS node, replace each INSERT_PARAMETER_..._NAME and INSERT_PARAMETER_..._VALUE for all parameters. Each declare_parameter(...) takes one parameter as input. All the parameter files should have the .param.yaml suffix so that the auto-format can be applied properly.

    Autoware has the following two types of parameter files for ROS packages:

    <launch>\n<arg name=\"foo_node_param_path\" default=\"$(find-pkg-share FOO_package)/config/foo_node.param.yaml\" />\n\n<node pkg=\"FOO_package\" exec=\"foo_node\">\n...\n    <param from=\"$(var foo_node_param_path)\" />\n</node>\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#json-schema","title":"JSON Schema","text":"

    JSON Schema is used to validate the parameter file(s), ensuring that they have the correct structure and content. Using JSON Schema for this purpose is considered best practice for cloud-native development. The schema template below shall be used as a starting point when defining the schema for a ROS node.

    {\n\"$schema\": \"http://json-schema.org/draft-07/schema#\",\n\"title\": \"INSERT_TITLE\",\n\"type\": \"object\",\n\"definitions\": {\n\"INSERT_ROS_NODE_NAME\": {\n\"type\": \"object\",\n\"properties\": {\n\"INSERT_PARAMETER_1_NAME\": {\n\"type\": \"INSERT_TYPE\",\n\"description\": \"INSERT_DESCRIPTION\",\n\"default\": \"INSERT_DEFAULT\",\n\"INSERT_BOUND_CONDITION(S)\": INSERT_BOUND_VALUE(S)\n},\n\"INSERT_PARAMETER_N_NAME\": {\n\"type\": \"INSERT_TYPE\",\n\"description\": \"INSERT_DESCRIPTION\",\n\"default\": \"INSERT_DEFAULT\",\n\"INSERT_BOUND_CONDITION(S)\": INSERT_BOUND_VALUE(S)\n}\n},\n\"required\": [\"INSERT_PARAMETER_1_NAME\", \"INSERT_PARAMETER_N_NAME\"],\n\"additionalProperties\": false\n}\n},\n\"properties\": {\n\"/**\": {\n\"type\": \"object\",\n\"properties\": {\n\"ros__parameters\": {\n\"$ref\": \"#/definitions/INSERT_ROS_NODE_NAME\"\n}\n},\n\"required\": [\"ros__parameters\"],\n\"additionalProperties\": false\n}\n},\n\"required\": [\"/**\"],\n\"additionalProperties\": false\n}\n

    The schema file path is INSERT_PATH_TO_PACKAGE/schema/ and the schema file name is INSERT_NODE_NAME.schema.json. To adapt the template to the ROS node, replace each INSERT_... and add all parameters 1..N.

    See example: Lidar Apollo Segmentation TVM Nodes schema

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#attributes","title":"Attributes","text":"

    Parameters have several attributes, some are required and some optional. The optional attributes are highly encouraged when applicable, as they provide useful information about a parameter and can ensure the value of the parameter is within its bounds.

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#required","title":"Required","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#optional","title":"Optional","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#tips-and-tricks","title":"Tips and Tricks","text":"

    Using well established standards enables the use of conventional tooling. Below is an example of how to link a schema to the parameter file(s) using VS Code. This enables a developer with convenient features such as auto-complete and parameter bound validation.

    In the root directory of where the project is hosted, create a .vscode folder with two files; extensions.json containing

    {\n\"recommendations\": [\"redhat.vscode-yaml\"]\n}\n

    and settings.json containing

    {\n\"yaml.schemas\": {\n\"./INSERT_PATH_TO_PACKAGE/schema/INSERT_NODE_NAME.schema.json\": \"**/INSERT_NODE_NAME/config/*.param.yaml\"\n}\n}\n

    The RedHat YAML extension enables validation of YAML files using JSON Schema and the \"yaml.schemas\" setting associates the *.schema.json file with all *.param.yaml files in the config/ folder.

    "},{"location":"contributing/coding-guidelines/ros-nodes/task-scheduling/","title":"Task scheduling","text":""},{"location":"contributing/coding-guidelines/ros-nodes/task-scheduling/#task-scheduling","title":"Task scheduling","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/","title":"Topic namespaces","text":""},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#topic-namespaces","title":"Topic namespaces","text":""},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#overview","title":"Overview","text":"

    ROS allows topics, parameters and nodes to be namespaced which provides the following benefits:

    This page focuses on how to use namespaces in Autoware and shows some useful examples. For basic information on topic namespaces, refer to this tutorial.

    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#how-topics-should-be-named-in-node","title":"How topics should be named in node","text":"

    Autoware divides the node into the following functional categories, and adds the start namespace for the nodes according to the categories.

    When a node is run in a namespace, all topics which that node publishes are given that same namespace. All nodes in the Autoware stack must support namespaces by avoiding practices such as publishing topics in the global namespace.

    In general, topics should be namespaced based on the function of the node which produces them and not the node (or nodes) which consume them.

    Classify topics as input or output topics based on whether they are subscribed or published by the node. In the node, an input topic is named input/topic_name and an output topic is named output/topic_name.

    Configure the topic in the node's launch file. Take the joy_controller node as an example, in the following example, set the input and output topics and remap topics in the joy_controller.launch.xml file.

    <launch>\n<arg name=\"input_joy\" default=\"/joy\"/>\n<arg name=\"input_odometry\" default=\"/localization/kinematic_state\"/>\n\n<arg name=\"output_control_command\" default=\"/external/$(var external_cmd_source)/joy/control_cmd\"/>\n<arg name=\"output_external_control_command\" default=\"/api/external/set/command/$(var external_cmd_source)/control\"/>\n<arg name=\"output_shift\" default=\"/api/external/set/command/$(var external_cmd_source)/shift\"/>\n<arg name=\"output_turn_signal\" default=\"/api/external/set/command/$(var external_cmd_source)/turn_signal\"/>\n<arg name=\"output_heartbeat\" default=\"/api/external/set/command/$(var external_cmd_source)/heartbeat\"/>\n<arg name=\"output_gate_mode\" default=\"/control/gate_mode_cmd\"/>\n<arg name=\"output_vehicle_engage\" default=\"/vehicle/engage\"/>\n\n<node pkg=\"joy_controller\" exec=\"joy_controller\" name=\"joy_controller\" output=\"screen\">\n<remap from=\"input/joy\" to=\"$(var input_joy)\"/>\n<remap from=\"input/odometry\" to=\"$(var input_odometry)\"/>\n\n<remap from=\"output/control_command\" to=\"$(var output_control_command)\"/>\n<remap from=\"output/external_control_command\" to=\"$(var output_external_control_command)\"/>\n<remap from=\"output/shift\" to=\"$(var output_shift)\"/>\n<remap from=\"output/turn_signal\" to=\"$(var output_turn_signal)\"/>\n<remap from=\"output/gate_mode\" to=\"$(var output_gate_mode)\"/>\n<remap from=\"output/heartbeat\" to=\"$(var output_heartbeat)\"/>\n<remap from=\"output/vehicle_engage\" to=\"$(var output_vehicle_engage)\"/>\n</node>\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#topic-names-in-the-code","title":"Topic names in the code","text":"
    1. Have ~ so that namespace in launch configuration is applied(should not start from root /).

    2. Have ~/input ~/output namespace before topic name used to communicate with other nodes.

      e.g., In node obstacle_avoidance_planner, using topic names of type ~/input/topic_name to subscribe to topics.

      objects_sub_ = create_subscription<PredictedObjects>(\n\"~/input/objects\", rclcpp::QoS{10},\nstd::bind(&ObstacleAvoidancePlanner::onObjects, this, std::placeholders::_1));\n

      e.g., In node obstacle_avoidance_planner, using topic names of type ~/output/topic_name to publish topic.

      traj_pub_ = create_publisher<Trajectory>(\"~/output/path\", 1);\n
    3. Visualization or debug purpose topics should have ~/debug/ namespace.

      e.g., In node obstacle_avoidance_planner, in order to debug or visualize topics, topic names of type ~/debug/topic_name are used to publish information.

      debug_markers_pub_ =\ncreate_publisher<visualization_msgs::msg::MarkerArray>(\"~/debug/marker\", durable_qos);\n\ndebug_msg_pub_ =\ncreate_publisher<tier4_debug_msgs::msg::StringStamped>(\"~/debug/calculation_time\", 1);\n

      The namespace configured in the launch file will be prepended to the topic names, so the topic names will be as follows:

      /planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/debug/marker /planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/debug/calculation_time

    4. Rationale: we want to make topic names remapped and configurable from launch files.

    "},{"location":"contributing/discussion-guidelines/","title":"Discussion guidelines","text":""},{"location":"contributing/discussion-guidelines/#discussion-guidelines","title":"Discussion guidelines","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/documentation-guidelines/","title":"Documentation guidelines","text":""},{"location":"contributing/documentation-guidelines/#documentation-guidelines","title":"Documentation guidelines","text":""},{"location":"contributing/documentation-guidelines/#workflow","title":"Workflow","text":"

    Contributions to Autoware's documentation are welcome, and the same principles described in the contribution guidelines should be followed. Small, limited changes can be made by forking this repository and submitting a pull request, but larger changes should be discussed with the community and Autoware maintainers via GitHub Discussion first.

    Examples of small changes include:

    Examples of larger changes include:

    "},{"location":"contributing/documentation-guidelines/#style-guide","title":"Style guide","text":"

    You should refer to the Google developer documentation style guide as much as possible. Reading the Highlights page of that guide is recommended, but if not then the key points below should be noted.

    "},{"location":"contributing/documentation-guidelines/#tips","title":"Tips","text":""},{"location":"contributing/documentation-guidelines/#how-to-preview-your-modification","title":"How to preview your modification","text":"

    There are two ways to preview your modification on a documentation website.

    "},{"location":"contributing/documentation-guidelines/#1-using-github-actions-workflow","title":"1. Using GitHub Actions workflow","text":"

    Follow the steps below.

    1. Create a pull request to the repository.
    2. Add the deploy-docs label from the sidebar (See below figure).
    3. Wait for a couple of minutes, and the github-actions bot will notify the URL for the pull request's preview.

    "},{"location":"contributing/documentation-guidelines/#2-running-an-mkdocs-server-in-your-local-environment","title":"2. Running an MkDocs server in your local environment","text":"

    Instead of creating a PR, you can use the mkdocs command to build Autoware's documentation websites on your local computer. Assuming that you are using Ubuntu OS, run the following to install the required libraries.

    python3 -m pip install -U $(curl -fsSL https://raw.githubusercontent.com/autowarefoundation/autoware-github-actions/main/deploy-docs/mkdocs-requirements.txt)\n

    Then, run mkdocs serve on your documentation directory.

    cd /PATH/TO/YOUR-autoware-documentation\nmkdocs serve\n

    It will launch the MkDocs server. Access http://127.0.0.1:8000/ to see the preview of the website.

    "},{"location":"contributing/pull-request-guidelines/","title":"Pull request guidelines","text":""},{"location":"contributing/pull-request-guidelines/#pull-request-guidelines","title":"Pull request guidelines","text":""},{"location":"contributing/pull-request-guidelines/#general-pull-request-workflow","title":"General pull request workflow","text":"

    Autoware uses the fork-and-pull model. For more details about the model, refer to GitHub Docs.

    The following is a general example of the pull request workflow based on the fork-and-pull model. Use this workflow as a reference when you contribute to Autoware.

    1. Create an issue.
      • Discuss the approaches to the issue with maintainers.
      • Confirm the support guidelines before creating an issue.
      • Follow the discussion guidelines when you discuss with other contributors.
    2. Create a fork repository. (for the first time only)
    3. Write code in your fork repository according to the approach agreed upon in the issue.
      • Write the tests and documentation as appropriate.
      • Follow the coding guidelines guidelines when you write code.
      • Follow the Testing guidelines guidelines when you write tests.
      • Follow the Documentation guidelines guidelines when you write documentation.
      • Follow the commit guidelines when you commit your changes.
    4. Test the code.
      • It is recommended that you summarize the test results, because you will need to explain the test results in the later review process.
      • If you are not sure what tests should be done, discuss them with maintainers.
    5. Create a pull request.
      • Follow the pull request rules when you create a pull request.
    6. Wait for the pull request to be reviewed.
      • The reviewers will review your code following the review guidelines.
        • Not only the reviewers, but also the author is encouraged to understand the review guidelines.
      • If CI checks have failed, fix the errors.
    7. Address the review comments pointed out by the reviewers.
      • If you don't understand the meaning of a review comment, ask the reviewers until you understand it.
        • Fixing without understanding the reason is not recommended because the author should be responsible for the final content of their own pull request.
      • If you don't agree with a review comment, ask the reviewers for a rational reason.
        • The reviewers are obligated to make the author understand the meanings of each comment.
  • After you have addressed the review comments, re-request a review from the reviewers and go back to 6.
      • If there are no more new review comments, the reviewers will approve the pull request and proceed to 8.
    8. Merge the pull request.
      • Anyone with write access can merge the pull request if there is no special request from maintainers.
        • The author is encouraged to merge the pull request to feel responsible for their own pull request.
        • If the author does not have write access, ask the reviewers or maintainers.
    "},{"location":"contributing/pull-request-guidelines/#pull-request-rules","title":"Pull request rules","text":""},{"location":"contributing/pull-request-guidelines/#use-an-appropriate-pull-request-template-required-non-automated","title":"Use an appropriate pull request template (required, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example","title":"Example","text":"

    There are two types of templates. Select one based on the following condition.

    1. Standard change:
      • Complexity:
        • New features or significant updates.
        • Requires deeper understanding of the codebase.
      • Impact:
        • Affects multiple parts of the system.
        • Basically includes minor features, bug fixes and performance improvement.
        • Needs testing before merging.
    2. Small change:
      • Complexity:
        • Documentation, simple refactoring, or style adjustments.
        • Easy to understand and review.
      • Impact:
        • Minimal effect on the system.
        • Quicker merge with less testing needed.
    "},{"location":"contributing/pull-request-guidelines/#steps-to-use-an-appropriate-pull-request-template","title":"Steps to use an appropriate pull request template","text":"
    1. Select the appropriate template, as shown in this video.
    2. Read the selected template carefully and fill the required content.
    3. Check the checkboxes during a review.
      • There are pre-review checklist and post-review checklist for the author.
    "},{"location":"contributing/pull-request-guidelines/#set-appropriate-reviewers-after-creating-a-pull-request-required-partially-automated","title":"Set appropriate reviewers after creating a pull request (required, partially automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_1","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_1","title":"Example","text":""},{"location":"contributing/pull-request-guidelines/#apply-conventional-commits-to-the-pull-request-title-required-automated","title":"Apply Conventional Commits to the pull request title (required, automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_2","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_2","title":"Example","text":"
    feat(trajectory_follower): add an awesome feature\n

    Note

    You have to start the description part (here add an awesome feature) with a lowercase.

    If your change breaks some interfaces, use the ! (breaking changes) mark as follows:

    feat(trajectory_follower)!: remove package\nfeat(trajectory_follower)!: change parameter names\nfeat(planning)!: change topic names\nfeat(autoware_utils)!: change function names\n

    For the repositories that contain code (most repositories), use the definition of conventional-commit-types for the type.

    For documentation repositories such as autoware-documentation, use the following definition:

    perf and test are generally unused. Other types have the same meaning as the code repositories.

    "},{"location":"contributing/pull-request-guidelines/#add-the-related-component-names-to-the-scope-of-conventional-commits-advisory-non-automated","title":"Add the related component names to the scope of Conventional Commits (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_3","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_3","title":"Example","text":"

    For ROS packages, adding the package name or component name is good.

    feat(trajectory_follower): add an awesome feature\nrefactor(planning, control): use common utils\n
    "},{"location":"contributing/pull-request-guidelines/#keep-a-pull-request-small-advisory-non-automated","title":"Keep a pull request small (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_4","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#exception","title":"Exception","text":"

    It is acceptable if it is agreed with maintainers that there is no other way but to submit a big pull request.

    "},{"location":"contributing/pull-request-guidelines/#example_4","title":"Example","text":""},{"location":"contributing/pull-request-guidelines/#remind-reviewers-if-there-is-no-response-for-more-than-a-week-advisory-non-automated","title":"Remind reviewers if there is no response for more than a week (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_5","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_5","title":"Example","text":"
    @{some-of-developers} Would it be possible for you to review this PR?\n@autoware-maintainers friendly ping.\n
    "},{"location":"contributing/pull-request-guidelines/ci-checks/","title":"CI checks","text":""},{"location":"contributing/pull-request-guidelines/ci-checks/#ci-checks","title":"CI checks","text":"

    Autoware has several checks for a pull request. The results are shown at the bottom of the pull request page as below.

    If the \u274c mark is shown, click the Details button and investigate the failure reason.

    If the Required mark is shown, you cannot merge the pull request unless you resolve the error. If not, it is optional, but preferably it should be fixed.

    The following sections explain about common CI checks in Autoware. Note that some repositories may have different settings.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#dco","title":"DCO","text":"

    The Developer Certificate of Origin (DCO) is a lightweight way for contributors to certify that they wrote or otherwise have the right to submit the code they are contributing to the project.

    This workflow checks whether the pull request fulfills DCO. You need to confirm the required items and commit with git commit -s.

    For more information, refer to the GitHub App page.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#semantic-pull-request","title":"semantic-pull-request","text":"

    This workflow checks whether the pull request follows Conventional Commits.

    For the detailed rules, see the pull request rules.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#pre-commit","title":"pre-commit","text":"

    pre-commit is a tool to run formatters or linters when you commit.

    This workflow checks whether the pull request has no error with pre-commit.

    If the workflow pre-commit.ci - pr is enabled in the repository, pre-commit.ci will automatically fix as many errors as possible. If some errors remain, fix them manually.

    You can run pre-commit in your local environment by the following command:

    pre-commit run -a\n

    Or you can install pre-commit to the repository and automatically run it before committing:

    pre-commit install\n

    Since it is difficult to detect errors with no false positives, some jobs are split into another config file and marked as optional. To check them, use the --config option:

    pre-commit run -a --config .pre-commit-config-optional.yaml\n
    "},{"location":"contributing/pull-request-guidelines/ci-checks/#spell-check-differential","title":"spell-check-differential","text":"

    This workflow detects spelling mistakes using CSpell with our dictionary file. You can submit pull requests to tier4/autoware-spell-check-dict to update the dictionary.

    Since it is difficult to detect errors with no false positives, it is an optional workflow, but it is preferable to remove as many spelling mistakes as possible.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#build-and-test-differential","title":"build-and-test-differential","text":"

    This workflow checks colcon build and colcon test for the pull request. To make the CI faster, it doesn't check all packages but only modified packages and the dependencies.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#build-and-test-differential-self-hosted","title":"build-and-test-differential-self-hosted","text":"

    This workflow is the ARM64 version of build-and-test-differential. You need to add the ARM64 label to run this workflow.

    For reference information, since ARM machines are not supported by GitHub-hosted runners, we use self-hosted runners prepared by the AWF. For the details about self-hosted runners, refer to GitHub Docs.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#deploy-docs","title":"deploy-docs","text":"

    This workflow deploys the preview documentation site for the pull request. You need to add the deploy-docs label to run this workflow.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/","title":"Commit guidelines","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#commit-guidelines","title":"Commit guidelines","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#branch-rules","title":"Branch rules","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#start-branch-names-with-the-corresponding-issue-numbers-advisory-non-automated","title":"Start branch names with the corresponding issue numbers (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#exception","title":"Exception","text":"

    If there are no corresponding issues, you can ignore this rule.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example","title":"Example","text":"
    123-add-feature\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#use-dash-case-for-the-separator-of-branch-names-advisory-non-automated","title":"Use dash-case for the separator of branch names (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_1","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_1","title":"Example","text":"
    123-add-feature\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference_1","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#make-branch-names-descriptive-advisory-non-automated","title":"Make branch names descriptive (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_2","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#exception_1","title":"Exception","text":"

    If you have already submitted a pull request, you do not have to change the branch name, because doing so would require re-creating the pull request, which is noisy and a waste of time. Be careful from the next time.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_2","title":"Example","text":"

    Usually it is good to start with a verb.

    123-fix-memory-leak-of-trajectory-follower\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#commit-rules","title":"Commit rules","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#sign-off-your-commits-required-automated","title":"Sign-off your commits (required, automated)","text":"

    Developers must certify that they wrote or otherwise have the right to submit the code they are contributing to the project.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_3","title":"Rationale","text":"

    If not, it will lead to complex license problems.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_3","title":"Example","text":"
    git commit -s\n
    feat: add a feature\n\nSigned-off-by: Autoware <autoware@example.com>\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference_2","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/review-guidelines/","title":"Review guidelines","text":""},{"location":"contributing/pull-request-guidelines/review-guidelines/#review-guidelines","title":"Review guidelines","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/pull-request-guidelines/review-tips/","title":"Review tips","text":""},{"location":"contributing/pull-request-guidelines/review-tips/#review-tips","title":"Review tips","text":""},{"location":"contributing/pull-request-guidelines/review-tips/#toggle-annotations-or-review-comments-in-the-diff-view","title":"Toggle annotations or review comments in the diff view","text":"

    There might be some annotations or review comments in the diff view during your review.

    To toggle annotations, press the A key.

    Before:

    After:

    To toggle review comments, press the I key.

    For other keyboard shortcuts, refer to GitHub Docs.

    "},{"location":"contributing/pull-request-guidelines/review-tips/#view-code-in-the-web-based-visual-studio-code","title":"View code in the web-based Visual Studio Code","text":"

    You can open Visual Studio Code from your browser to view code in a rich UI. To use it, press the . key on any repository or pull request.

    For more detailed usage, refer to github/dev.

    "},{"location":"contributing/pull-request-guidelines/review-tips/#check-out-the-branch-of-a-pull-request-quickly","title":"Check out the branch of a pull request quickly","text":"

    If you want to check out the branch of a pull request, it's generally troublesome with the fork-and-pull model.

    # Copy the user name and the fork URL.\ngit remote add {user-name} {fork-url}\ngit checkout {user-name}/{branch-name}\ngit remote rm {user-name} # To clean up\n

    Instead, you can use GitHub CLI to simplify the steps, just run gh pr checkout {pr-number}.

    You can copy the command from the top right of the pull request page.

    "},{"location":"contributing/testing-guidelines/","title":"Testing guidelines","text":""},{"location":"contributing/testing-guidelines/#testing-guidelines","title":"Testing guidelines","text":""},{"location":"contributing/testing-guidelines/#unit-testing","title":"Unit testing","text":"

    Unit testing is a software testing method that tests individual units of source code to determine whether they satisfy the specification.

    For details, see the Unit testing guidelines.

    "},{"location":"contributing/testing-guidelines/#integration-testing","title":"Integration testing","text":"

    Integration testing combines and tests the individual software modules as a group, and is done after unit testing.

    While performing integration testing, the following subtypes of tests are written:

    1. Fault injection testing
    2. Back-to-back comparison between a model and code
    3. Requirements-based testing
    4. Anomaly detection during integration testing
    5. Random input testing

    For details, see the Integration testing guidelines.

    "},{"location":"contributing/testing-guidelines/integration-testing/","title":"Integration testing","text":""},{"location":"contributing/testing-guidelines/integration-testing/#integration-testing","title":"Integration testing","text":"

    An integration test is defined as the phase in software testing where individual software modules are combined and tested as a group. Integration tests occur after unit tests, and before validation tests.

    The input to an integration test is a set of independent modules that have been unit tested. The set of modules is tested against the defined integration test plan, and the output is a set of properly integrated software modules that is ready for system testing.

    "},{"location":"contributing/testing-guidelines/integration-testing/#value-of-integration-testing","title":"Value of integration testing","text":"

    Integration tests determine if independently developed software modules work correctly when the modules are connected to each other. In ROS 2, the software modules are called nodes. Testing a single node is a special type of integration test that is commonly referred to as component testing.

    Integration tests help to find the following types of errors:

    With ROS 2, it is possible to program complex autonomous-driving applications with a large number of nodes. Therefore, a lot of effort has been made to provide an integration-test framework that helps developers test the interaction of ROS 2 nodes.

    "},{"location":"contributing/testing-guidelines/integration-testing/#integration-test-framework","title":"Integration-test framework","text":"

    A typical integration-test framework has three parts:

    1. A series of executables with arguments that work together and generate outputs.
    2. A series of expected outputs that should match the output of the executables.
    3. A launcher that starts the tests, compares the outputs to the expected outputs, and determines if the test passes.

    In Autoware, we use the launch_testing framework.

    "},{"location":"contributing/testing-guidelines/integration-testing/#smoke-tests","title":"Smoke tests","text":"

    Autoware has a dedicated API for smoke testing. To use this framework, in package.xml add:

    <test_depend>autoware_testing</test_depend>\n

    And in CMakeLists.txt add:

    if(BUILD_TESTING)\nfind_package(autoware_testing REQUIRED)\nadd_smoke_test(${PROJECT_NAME} ${NODE_NAME})\nendif()\n

    Doing so adds smoke tests that ensure that a node can be:

    1. Launched with a default parameter file.
    2. Terminated with a standard SIGTERM signal.

    For the full API documentation, refer to the package design page.

    Note

    This API is not suitable for all smoke test cases. It cannot be used when a specific file location (eg: for a map) is required to be passed to the node, or if some preparation needs to be conducted before node launch. In such cases use the manual solution from the component test section below.

    "},{"location":"contributing/testing-guidelines/integration-testing/#integration-test-with-a-single-node-component-test","title":"Integration test with a single node: component test","text":"

    The simplest scenario is a single node. In this case, the integration test is commonly referred to as a component test.

    To add a component test to an existing node, you can follow the example of the lanelet2_map_loader in the map_loader package (added in this PR).

    In package.xml, add:

    <test_depend>ros_testing</test_depend>\n

    In CMakeLists.txt, add or modify the BUILD_TESTING section:

    if(BUILD_TESTING)\nadd_ros_test(\ntest/lanelet2_map_loader_launch.test.py\nTIMEOUT \"30\"\n)\ninstall(DIRECTORY\ntest/data/\nDESTINATION share/${PROJECT_NAME}/test/data/\n)\nendif()\n

    In addition to the command add_ros_test, we also install any data that is required by the test using the install command.

    Note

    To create a test, either read the launch_testing quick-start example, or follow the steps below.

    Taking test/lanelet2_map_loader_launch.test.py as an example, first dependencies are imported:

    import os\nimport unittest\n\nfrom ament_index_python import get_package_share_directory\nimport launch\nfrom launch import LaunchDescription\nfrom launch_ros.actions import Node\nimport launch_testing\nimport pytest\n

    Then a launch description is created to launch the node under test. Note that the test_map.osm file path is found and passed to the node, something that cannot be done with the smoke testing API:

    @pytest.mark.launch_test\ndef generate_test_description():\n\n    lanelet2_map_path = os.path.join(\n        get_package_share_directory(\"map_loader\"), \"test/data/test_map.osm\"\n    )\n\n    lanelet2_map_loader = Node(\n        package=\"map_loader\",\n        executable=\"lanelet2_map_loader\",\n        parameters=[{\"lanelet2_map_path\": lanelet2_map_path}],\n    )\n\n    context = {}\n\n    return (\n        LaunchDescription(\n            [\n                lanelet2_map_loader,\n                # Start test after 1s - gives time for the map_loader to finish initialization\n                launch.actions.TimerAction(\n                    period=1.0, actions=[launch_testing.actions.ReadyToTest()]\n                ),\n            ]\n        ),\n        context,\n    )\n

    Note

    Finally, a test is executed after the node executable has been shut down (post_shutdown_test). Here we ensure that the node was launched without error and exited cleanly.

    @launch_testing.post_shutdown_test()\nclass TestProcessOutput(unittest.TestCase):\n    def test_exit_code(self, proc_info):\n        # Check that process exits with code 0: no error\n        launch_testing.asserts.assertExitCodes(proc_info)\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#running-the-test","title":"Running the test","text":"

    Continuing the example from above, first build your package:

    colcon build --packages-up-to map_loader\nsource install/setup.bash\n

    Then either execute the component test manually:

    ros2 test src/universe/autoware.universe/map/map_loader/test/lanelet2_map_loader_launch.test.py\n

    Or as part of testing the entire package:

    colcon test --packages-select map_loader\n

    Verify that the test is executed; e.g.

    $ colcon test-result --all --verbose\n...\nbuild/map_loader/test_results/map_loader/test_lanelet2_map_loader_launch.test.py.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#next-steps","title":"Next steps","text":"

    The simple test described in Integration test with a single node: component test can be extended in numerous directions, such as testing a node's output.

    "},{"location":"contributing/testing-guidelines/integration-testing/#testing-the-output-of-a-node","title":"Testing the output of a node","text":"

    To test while the node is running, create an active test by adding a subclass of Python's unittest.TestCase to *launch.test.py. Some boilerplate code is required to access output by creating a node and a subscription to a particular topic, e.g.

    import unittest\n\nclass TestRunningDataPublisher(unittest.TestCase):\n\n    @classmethod\n    def setUpClass(cls):\n        cls.context = Context()\n        rclpy.init(context=cls.context)\n        cls.node = rclpy.create_node(\"test_node\", context=cls.context)\n\n    @classmethod\n    def tearDownClass(cls):\n        rclpy.shutdown(context=cls.context)\n\n    def setUp(self):\n        self.msgs = []\n        sub = self.node.create_subscription(\n            msg_type=my_msg_type,\n            topic=\"/info_test\",\n            callback=self._msg_received\n        )\n        self.addCleanup(self.node.destroy_subscription, sub)\n\n    def _msg_received(self, msg):\n        # Callback for ROS 2 subscriber used in the test\n        self.msgs.append(msg)\n\n    def get_message(self):\n        startlen = len(self.msgs)\n\n        executor = rclpy.executors.SingleThreadedExecutor(context=self.context)\n        executor.add_node(self.node)\n\n        try:\n            # Try up to 60 s to receive messages\n            end_time = time.time() + 60.0\n            while time.time() < end_time:\n                executor.spin_once(timeout_sec=0.1)\n                if startlen != len(self.msgs):\n                    break\n\n            self.assertNotEqual(startlen, len(self.msgs))\n            return self.msgs[-1]\n        finally:\n            executor.remove_node(self.node)\n\n    def test_message_content():\n        msg = self.get_message()\n        self.assertEqual(msg, \"Hello, world\")\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#references","title":"References","text":""},{"location":"contributing/testing-guidelines/unit-testing/","title":"Unit testing","text":""},{"location":"contributing/testing-guidelines/unit-testing/#unit-testing","title":"Unit testing","text":"

    Unit testing is the first phase of testing and is used to validate units of source code such as classes and functions. Typically, a unit of code is tested by validating its output for various inputs. Unit testing helps ensure that the code behaves as intended and prevents accidental changes of behavior.

    Autoware uses the ament_cmake framework to build and run tests. The same framework is also used to analyze the test results.

    ament_cmake provides several convenience functions to make it easy to register tests in a CMake-based package and to ensure that JUnit-compatible result files are generated. It currently supports a few different testing frameworks like pytest, gtest, and gmock.

    In order to prevent tests running in parallel from interfering with each other when publishing and subscribing to ROS topics, it is recommended to use commands from ament_cmake_ros to run tests in isolation.

    See below for an example of using ament_add_ros_isolated_gtest with colcon test. All other tests follow a similar pattern.

    "},{"location":"contributing/testing-guidelines/unit-testing/#create-a-unit-test-with-gtest","title":"Create a unit test with gtest","text":"

    In my_cool_pkg/test, create the gtest code file test_my_cool_pkg.cpp:

    #include \"gtest/gtest.h\"\n#include \"my_cool_pkg/my_cool_pkg.hpp\"\nTEST(TestMyCoolPkg, TestHello) {\nEXPECT_EQ(my_cool_pkg::print_hello(), 0);\n}\n

    In package.xml, add the following line:

    <test_depend>ament_cmake_ros</test_depend>\n

    Next add an entry under BUILD_TESTING in the CMakeLists.txt to compile the test source files:

    if(BUILD_TESTING)\n\nament_add_ros_isolated_gtest(test_my_cool_pkg test/test_my_cool_pkg.cpp)\ntarget_link_libraries(test_my_cool_pkg ${PROJECT_NAME})\ntarget_include_directories(test_my_cool_pkg PRIVATE src)  # For private headers.\n...\nendif()\n

    This automatically links the test with the default main function provided by gtest. The code under test is usually in a different CMake target (${PROJECT_NAME} in the example) and its shared object for linking needs to be added. If the test source files include private headers from the src directory, the directory needs to be added to the include path using target_include_directories() function.

    To register a new gtest item, wrap the test code with the macro TEST (). TEST () is a predefined macro that helps generate the final test code, and also registers a gtest item to be available for execution. The test case name should be in CamelCase, since gtest inserts an underscore between the fixture name and the test case name when creating the test executable.

    gtest/gtest.h also contains predefined macros of gtest like ASSERT_TRUE(condition), ASSERT_FALSE(condition), ASSERT_EQ(val1,val2), ASSERT_STREQ(str1,str2), EXPECT_EQ(), etc. ASSERT_* will abort the test if the condition is not satisfied, while EXPECT_* will mark the test as failed but continue on to the next test condition.

    Info

    More information about gtest and its features can be found in the gtest repo.

    In the demo CMakeLists.txt, ament_add_ros_isolated_gtest is a predefined macro in ament_cmake_ros that helps simplify adding gtest code. Details can be viewed in ament_add_gtest.cmake.

    "},{"location":"contributing/testing-guidelines/unit-testing/#build-test","title":"Build test","text":"

    By default, all necessary test files (ELF, CTestTestfile.cmake, etc.) are compiled by colcon:

    cd ~/workspace/\ncolcon build --packages-select my_cool_pkg\n

    Test files are generated under ~/workspace/build/my_cool_pkg.

    "},{"location":"contributing/testing-guidelines/unit-testing/#run-test","title":"Run test","text":"

    To run all tests for a specific package, call:

    $ colcon test --packages-select my_cool_pkg\n\nStarting >>> my_cool_pkg\nFinished <<< my_cool_pkg [7.80s]\n\nSummary: 1 package finished [9.27s]\n

    The test command output contains a brief report of all the test results.

    To get job-wise information of all executed tests, call:

    $ colcon test-result --all\n\nbuild/my_cool_pkg/test_results/my_cool_pkg/copyright.xunit.xml: 8 tests, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/cppcheck.xunit.xml: 6 tests, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/lint_cmake.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/my_cool_pkg_exe_integration_test.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/xmllint.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\n\nSummary: 18 tests, 0 errors, 0 failures, 0 skipped\n

    Look in the ~/workspace/log/test_<date>/<package_name> directory for all the raw test commands, std_out, and std_err. There is also the ~/workspace/log/latest_*/ directory containing symbolic links to the most recent package-level build and test output.

    To print the tests' details while the tests are being run, use the --event-handlers console_cohesion+ option to print the details directly to the console:

    $ colcon test --event-handlers console_cohesion+ --packages-select my_cool_pkg\n\n...\ntest 1\n    Start 1: test_my_cool_pkg\n\n1: Test command: /usr/bin/python3 \"-u\" \"~/workspace/install/share/ament_cmake_test/cmake/run_test.py\" \"~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\" \"--package-name\" \"my_cool_pkg\" \"--output-file\" \"~/workspace/build/my_cool_pkg/ament_cmake_gtest/test_my_cool_pkg.txt\" \"--command\" \"~/workspace/build/my_cool_pkg/test_my_cool_pkg\" \"--gtest_output=xml:~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\"\n1: Test timeout computed to be: 60\n1: -- run_test.py: invoking following command in '~/workspace/src/my_cool_pkg':\n1:  - ~/workspace/build/my_cool_pkg/test_my_cool_pkg --gtest_output=xml:~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\n1: [==========] Running 1 test from 1 test case.\n1: [----------] Global test environment set-up.\n1: [----------] 1 test from test_my_cool_pkg\n1: [ RUN      ] test_my_cool_pkg.test_hello\n1: Hello World\n1: [       OK ] test_my_cool_pkg.test_hello (0 ms)\n1: [----------] 1 test from test_my_cool_pkg (0 ms total)\n1:\n1: [----------] Global test environment tear-down\n1: [==========] 1 test from 1 test case ran. (0 ms total)\n1: [  PASSED  ] 1 test.\n1: -- run_test.py: return code 0\n1: -- run_test.py: inject classname prefix into gtest result file '~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml'\n1: -- run_test.py: verify result file '~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml'\n1/5 Test #1: test_my_cool_pkg ...................   
Passed    0.09 sec\n\n...\n\n100% tests passed, 0 tests failed out of 5\n\nLabel Time Summary:\ncopyright     =   0.49 sec*proc (1 test)\ncppcheck      =   0.20 sec*proc (1 test)\ngtest         =   0.05 sec*proc (1 test)\nlint_cmake    =   0.18 sec*proc (1 test)\nlinter        =   1.34 sec*proc (4 tests)\nxmllint       =   0.47 sec*proc (1 test)\n\nTotal Test time (real) =   7.91 sec\n...\n
    "},{"location":"contributing/testing-guidelines/unit-testing/#code-coverage","title":"Code coverage","text":"

    Loosely described, a code coverage metric is a measure of how much of the program code has been exercised (covered) during testing.

    In the Autoware repositories, Codecov is used to automatically calculate coverage of any open pull request.

    More details about the code coverage metrics can be found in the Codecov documentation.

    "},{"location":"datasets/","title":"Datasets","text":""},{"location":"datasets/#datasets","title":"Datasets","text":"

    Autoware partners provide datasets for testing and development. These datasets are available for download here.

    "},{"location":"datasets/#bus-odd-operational-design-domain-datasets","title":"Bus-ODD (Operational Design Domain) datasets","text":""},{"location":"datasets/#leo-drive-isuzu-sensor-data","title":"Leo Drive - ISUZU sensor data","text":"

    This dataset contains data from the Isuzu bus used in the Bus ODD project.

    It contains data from the following sensors:

    It also contains /tf topic for static transformations between sensors.

    "},{"location":"datasets/#required-message-types","title":"Required message types","text":"

    The GNSS data is available in sensor_msgs/msg/NavSatFix message type.

    The Applanix raw messages are also included in applanix_msgs/msg/NavigationPerformanceGsof50 and applanix_msgs/msg/NavigationSolutionGsof49 message types. In order to play back these messages, you need to build and source the applanix_msgs package.

    # Create a workspace and clone the repository\nmkdir -p ~/applanix_ws/src && cd \"$_\"\ngit clone https://github.com/autowarefoundation/applanix.git\ncd ..\n\n# Build the workspace\ncolcon build --symlink-install --packages-select applanix_msgs\n\n# Source the workspace\nsource ~/applanix_ws/install/setup.bash\n\n# Now you can play back the messages\n

    Also make sure to source Autoware Universe workspace too.

    "},{"location":"datasets/#download-instructions","title":"Download instructions","text":"
    # Install awscli\n$ sudo apt update && sudo apt install awscli -y\n\n# This will download the entire dataset to the current directory.\n# (About 10.9GB of data)\n$ aws s3 sync s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/ ./2022-08-22_leo_drive_isuzu_bags  --no-sign-request\n\n# Optionally,\n# If you instead want to download a single bag file, you can get a list of the available files with following:\n$ aws s3 ls s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/ --no-sign-request\n   PRE all-sensors-bag1_compressed/\n   PRE all-sensors-bag2_compressed/\n   PRE all-sensors-bag3_compressed/\n   PRE all-sensors-bag4_compressed/\n   PRE all-sensors-bag5_compressed/\n   PRE all-sensors-bag6_compressed/\n   PRE driving_20_kmh_2022_06_10-16_01_55_compressed/\n   PRE driving_30_kmh_2022_06_10-15_47_42_compressed/\n\n# Then you can download a single bag file with the following:\naws s3 sync s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/all-sensors-bag1_compressed/ ./all-sensors-bag1_compressed  --no-sign-request\n
    "},{"location":"datasets/#autocoreai-lidar-ros-2-bag-file-and-pcap","title":"AutoCore.ai - lidar ROS 2 bag file and pcap","text":"

    This dataset contains pcap files and ROS 2 bag files from an Ouster OS1-64 lidar. The pcap file and the ROS 2 bag file were recorded at the same time, with a slight difference in duration.

    Click here to download (~553MB)

    Reference Issue

    "},{"location":"design/","title":"Autoware's Design","text":""},{"location":"design/#autowares-design","title":"Autoware's Design","text":""},{"location":"design/#architecture","title":"Architecture","text":"

    Core and Universe.

    Autoware provides the runtimes and technology components by open-source software. The runtimes are based on the Robot Operating System (ROS). The technology components are provided by contributors, which include, but are not limited to:

    "},{"location":"design/#concern-assumption-and-limitation","title":"Concern, Assumption, and Limitation","text":"

    The downside of the microautonomy architecture is that the computational performance of end applications is sacrificed due to its data path overhead attributed to functional modularity. In other words, the trade-off characteristic of the microautonomy architecture exists between computational performance and functional modularity. This trade-off problem can be solved technically by introducing real-time capability. This is because autonomous driving systems are not really designed to be real-fast, that is, low-latency computing is nice-to-have but not must-have. The must-have feature for autonomous driving systems is that the latency of computing is predictable, that is, the systems are real-time. As a whole, we can compromise computational performance to an extent that is predictable enough to meet the given timing constraints of autonomous driving systems, often referred to as deadlines of computation.

    "},{"location":"design/#design","title":"Design","text":"

    Warning

    Under Construction

    "},{"location":"design/#autoware-concepts","title":"Autoware concepts","text":"

    The Autoware concepts page describes the design philosophy of Autoware. Readers (service providers and all Autoware users) will learn the basic concepts underlying Autoware development, such as microautonomy and the Core/Universe architecture.

    "},{"location":"design/#autoware-architecture","title":"Autoware architecture","text":"

    The Autoware architecture page describes an overview of each module that makes up Autoware. Readers (all Autoware users) will gain a high-level picture of how each module that composes Autoware works.

    "},{"location":"design/#autoware-interfaces","title":"Autoware interfaces","text":"

    The Autoware interfaces page describes in detail the interface of each module that makes up Autoware. Readers (intermediate developers) will learn how to add new functionality to Autoware and how to integrate their own modules with Autoware.

    "},{"location":"design/#configuration-management","title":"Configuration management","text":""},{"location":"design/#conclusion","title":"Conclusion","text":""},{"location":"design/autoware-architecture/","title":"Architecture overview","text":""},{"location":"design/autoware-architecture/#architecture-overview","title":"Architecture overview","text":"

    This page describes the architecture of Autoware.

    "},{"location":"design/autoware-architecture/#introduction","title":"Introduction","text":"

    The current Autoware is defined to be a layered architecture that clarifies each module's role and simplifies the interface between them. By doing so:

    Note that the initial focus of this architecture design was solely on driving capability, and so the following features were left as future work:

    "},{"location":"design/autoware-architecture/#high-level-architecture-design","title":"High-level architecture design","text":"

    Autoware's architecture consists of the following six stacks. Each linked page contains a more detailed set of requirements and use cases specific to that stack:

    "},{"location":"design/autoware-architecture/#node-diagram","title":"Node diagram","text":"

    A diagram showing Autoware's nodes in the default configuration can be found on the Node diagram page. Detailed documents for each node are available in the Autoware Universe docs.

    Note that Autoware configurations are scalable / selectable and will vary depending on the environment and required use cases.

    "},{"location":"design/autoware-architecture/#references","title":"References","text":""},{"location":"design/autoware-architecture/control/","title":"Control component design","text":""},{"location":"design/autoware-architecture/control/#control-component-design","title":"Control component design","text":""},{"location":"design/autoware-architecture/control/#abstract","title":"Abstract","text":"

    This document presents the design concept of the Control Component. The content is as follows:

    "},{"location":"design/autoware-architecture/control/#autoware-control-design","title":"Autoware Control Design","text":"

    The Control Component generates the control signal to which the Vehicle Component subscribes. The generated control signals are computed based on the reference trajectories from the Planning Component.

    The Control Component consists of two modules. The trajectory_follower module generates a vehicle control command to follow the reference trajectory received from the planning module. The command includes, for example, the desired steering angle and target speed. The vehicle_command_gate is responsible for filtering the control command to prevent abnormal values and then sending it to the vehicle. This gate also allows switching between multiple sources such as the MRM (minimal risk maneuver) module or some remote control module, in addition to the trajectory follower.

    The Autoware control system is designed as a platform for automated driving systems that can be compatible with a diverse range of vehicles.

    The control process in Autoware uses general information (such as target acceleration and deceleration) and no vehicle-specific information (such as brake pressure) is used. Hence it can be adjusted independently of the vehicle's drive interface enabling easy integration or performance tuning.

    Furthermore, significant differences that affect vehicle motion constraints, such as two-wheel steering or four-wheel steering, are addressed by switching the control vehicle model, achieving control specialized for each characteristic.

    Autoware's control module outputs the necessary information to control the vehicle as a substitute for a human driver. For example, the control command from the control module looks like the following:

    - Target steering angle\n- Target steering torque\n- Target speed\n- Target acceleration\n

    Note that vehicle-specific values such as pedal positions and low-level information such as individual wheel rotation speeds are excluded from the command.

    "},{"location":"design/autoware-architecture/control/#vehicle-adaptation-design","title":"Vehicle Adaptation Design","text":""},{"location":"design/autoware-architecture/control/#vehicle-interface-adapter","title":"Vehicle interface adapter","text":"

    Autoware is designed to be an autonomous driving platform able to accommodate vehicles with various drivetrain types.

    This is an explanation of how Autoware handles the standardization of systems with different vehicle drivetrain. The interfaces for vehicle drivetrain are diverse, including steering angle, steering angular velocity, steering torque, speed, accel/brake pedals, and brake pressure. To accommodate these differences, Autoware adds an adapter module between the control component and the vehicle interface. This module performs the conversion between the proprietary message types used by the vehicle (such as brake pressure) and the generic types used by Autoware (such as desired acceleration). By providing this conversion information, the differences in vehicle drivetrain can be accommodated.

    If the information is not known in advance, an automatic calibration tool can be used. Calibration will occur within limited degrees of freedom, generating the information necessary for the drivetrain conversion automatically.

    This configuration is summarized in the following diagram.

    "},{"location":"design/autoware-architecture/control/#examples-of-several-vehicle-interfaces","title":"Examples of several vehicle interfaces","text":"

    This is an example of the several drivetrain types in the vehicle interface.

    Vehicle Lateral interface Longitudinal interface Note Lexus Steering angle Accel/brake pedal position Acceleration lookup table conversion for longitudinal JPN TAXI Steering angle Accel/brake pedal position Acceleration lookup table conversion for longitudinal GSM8 Steering EPS voltage Acceleration motor voltage, Deceleration brake hydraulic pressure lookup table and PID conversion for lateral and longitudinal YMC Golfcart Steering angle Velocity Logiee yaw rate Velocity F1 TENTH Steering angle Motor RPM interface code"},{"location":"design/autoware-architecture/control/#control-feature-design","title":"Control Feature Design","text":"

    The following lists the features provided by Autoware's Control/Vehicle component, as well as the conditions and assumptions required to utilize them effectively.

    The proper operation of the ODD is limited by factors such as whether the functions are enabled, delay time, calibration accuracy and degradation rate, and sensor accuracy.

    Feature Description\u3000 Requirements/Assumptions Note \u3000Limitation for now Lateral Control Control the drivetrain system related to lateral vehicle motion Trying to increase the number of vehicle types that can be supported in the future. Only front-steering type is supported. Longitudinal Control Control the drivetrain system related to longitudinal vehicle motion Slope Compensation Supports precise vehicle motion control on slopes Gradient information can be obtained from maps or sensors attached to the chassis If gradient information is not available, the gradient is estimated from the vehicle's pitch angle. Delay Compensation Controls the drivetrain system appropriately in the presence of time delays The drivetrain delay information is provided in advance If there is no delay information, the drivetrain delay is estimated automatically (automatic calibration). However, the effect of delay cannot be completely eliminated, especially in scenarios with sudden changes in speed. Only fixed delay times can be set for longitudinal and lateral drivetrain systems separately. It does not accommodate different delay times for the accelerator and brake. Drivetrain IF Conversion (Lateral Control) Converts the drivetrain-specific information of the vehicle into the drivetrain information used by Autoware (e.g., target steering angular velocity \u2192 steering torque) The conversion information is provided in advance If there is no conversion information, the conversion map is estimated automatically (automatic calibration). The degree of freedom for conversion is limited (2D lookup table + PID FB). 
Drivetrain IF Conversion (Longitudinal Control) Converts the drivetrain-specific information of the vehicle into the drivetrain information used by Autoware (e.g., target acceleration \u2192 accelerator/brake pedal value) The conversion information is provided in advance If there is no conversion information, the conversion map is estimated automatically (automatic calibration). The degree of freedom for conversion is limited (2D lookup table + PID FB). Automatic Calibration Automatically estimates and applies values such as drivetrain IF conversion map and delay time. The drivetrain status can be obtained (must) Anomaly Detection Notifies when there is a discrepancy in the calibration or unexpected drivetrain behavior The drivetrain status can be obtained (must) Steering Zero Point Correction Corrects the midpoint of the steering to achieve appropriate steering control The drivetrain status can be obtained (must) Steering Deadzone Correction Corrects the deadzone of the steering to achieve appropriate steering control The steering deadzone parameter is provided in advance If the parameter is unknown, the deadzone parameter is estimated from driving information Not available now Steering Deadzone Estimation Dynamically estimates the steering deadzone from driving data Not available now Weight Compensation Performs appropriate vehicle control according to weight Weight information can be obtained from sensors If there is no weight sensor, estimate the weight from driving information. Currently not available Weight Estimation Dynamically estimates weight from driving data Currently not available

    The list above does not cover wheel control systems such as ABS commonly used in vehicles. Regarding these features, the following considerations are taken into account.

    "},{"location":"design/autoware-architecture/control/#integration-with-vehicle-side-functions","title":"Integration with vehicle-side functions","text":"

    ABS (Anti-lock Brake System) and ESC (Electric Stability Control) are two functions that may be pre-installed on a vehicle, directly impacting its controllability. The control modules of Autoware assume that both ABS and ESC are installed on the vehicle and their absence may cause unreliable controls depending on the target ODD. For example, with low-velocity driving in a controlled environment, these functions are not necessary.

    Also, note that this statement does not negate the development of ABS functionality in autonomous driving systems.

    "},{"location":"design/autoware-architecture/control/#autoware-capabilities-and-vehicle-requirements","title":"Autoware Capabilities and Vehicle Requirements","text":"

    As an alternative to human driving, autonomous driving systems essentially aim to handle tasks that humans can perform. This includes not only controlling the steering wheel, accel, and brake, but also automatically detecting issues such as poor brake response or a misaligned steering angle. However, this is a trade-off, as better vehicle performance will lead to superior system behavior, ultimately affecting the design of ODD.

    On the other hand, for tasks that are not typically anticipated or cannot be handled by a human driver, processing in the vehicle ECU is expected. Examples of such scenarios include cases where the brake response is clearly delayed or when the vehicle rotates due to a single-side tire slipping. These tasks are typically handled by ABS or ESC.

    "},{"location":"design/autoware-architecture/localization/","title":"Index","text":"

    LOCALIZATION COMPONENT DESIGN DOC

    "},{"location":"design/autoware-architecture/localization/#abstract","title":"Abstract","text":""},{"location":"design/autoware-architecture/localization/#1-requirements","title":"1. Requirements","text":"

    Localization aims to estimate vehicle pose, velocity, and acceleration.

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/localization/#2-sensor-configuration-examples","title":"2. Sensor Configuration Examples","text":"

    This section shows example sensor configurations and their expected performances. Each sensor has its own advantages and disadvantages, but overall performance can be improved by fusing multiple sensors.

    "},{"location":"design/autoware-architecture/localization/#3d-lidar-pointcloud-map","title":"3D-LiDAR + PointCloud Map","text":""},{"location":"design/autoware-architecture/localization/#expected-situation","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#3d-lidar-or-camera-vector-map","title":"3D-LiDAR or Camera + Vector Map","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_1","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_1","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionalities","title":"Functionalities","text":""},{"location":"design/autoware-architecture/localization/#gnss","title":"GNSS","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_2","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situation-that-can-make-the-system-unstable","title":"Situation that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_1","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#camera-visual-odometry-visual-slam","title":"Camera (Visual Odometry, Visual SLAM)","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_3","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_2","title":"Situations that can make the system 
unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_2","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#wheel-speed-sensor","title":"Wheel speed sensor","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_4","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_3","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_3","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#imu","title":"IMU","text":""},{"location":"design/autoware-architecture/localization/#expected-environments","title":"Expected environments","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_4","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_4","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#geomagnetic-sensor","title":"Geomagnetic sensor","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_5","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_5","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_5","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#magnetic-markers","title":"Magnetic markers","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_6","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-where-the-system-becomes-unstable","title":"Situations where 
the system becomes unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_6","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#3-requirements","title":"3. Requirements","text":""},{"location":"design/autoware-architecture/localization/#4-architecture","title":"4. Architecture","text":""},{"location":"design/autoware-architecture/localization/#abstract_1","title":"Abstract","text":"

    Two architectures are defined, \"Required\" and \"Recommended\". However, the \"Required\" architecture only contains the inputs and outputs necessary to accept various localization algorithms. To improve the reusability of each module, the required components are defined in the \"Recommended\" architecture section along with a more detailed explanation.

    "},{"location":"design/autoware-architecture/localization/#required-architecture","title":"Required Architecture","text":""},{"location":"design/autoware-architecture/localization/#input","title":"Input","text":""},{"location":"design/autoware-architecture/localization/#output","title":"Output","text":""},{"location":"design/autoware-architecture/localization/#recommended-architecture","title":"Recommended Architecture","text":""},{"location":"design/autoware-architecture/localization/#pose-estimator","title":"Pose Estimator","text":""},{"location":"design/autoware-architecture/localization/#twist-accel-estimator","title":"Twist-Accel Estimator","text":""},{"location":"design/autoware-architecture/localization/#kinematics-fusion-filter","title":"Kinematics Fusion Filter","text":""},{"location":"design/autoware-architecture/localization/#localization-diagnostics","title":"Localization Diagnostics","text":""},{"location":"design/autoware-architecture/localization/#tf-tree","title":"TF tree","text":"frame meaning earth ECEF (Earth Centered Earth Fixed\uff09 map Origin of the map coordinate (ex. MGRS origin) viewer User-defined frame for rviz base_link Reference pose of the ego-vehicle (projection of the rear-axle center onto the ground surface) sensor Reference pose of each sensor

    Developers can optionally add other frames such as odom or base_footprint as long as the tf structure above is maintained.

    "},{"location":"design/autoware-architecture/localization/#the-localization-modules-ideal-functionality","title":"The localization module's ideal functionality","text":""},{"location":"design/autoware-architecture/localization/#kpi","title":"KPI","text":"

    To maintain sufficient pose estimation performance for safe operation, the following metrics are considered:

    "},{"location":"design/autoware-architecture/localization/#5-interface-and-data-structure","title":"5. Interface and Data Structure","text":""},{"location":"design/autoware-architecture/localization/#6-concerns-assumptions-and-limitations","title":"6. Concerns, Assumptions, and Limitations","text":""},{"location":"design/autoware-architecture/localization/#prerequisites-of-sensors-and-inputs","title":"Prerequisites of sensors and inputs","text":""},{"location":"design/autoware-architecture/localization/#sensor-prerequisites","title":"Sensor prerequisites","text":""},{"location":"design/autoware-architecture/localization/#map-prerequisites","title":"Map prerequisites","text":""},{"location":"design/autoware-architecture/localization/#computational-resources","title":"Computational resources","text":"
    1. For more details about bias, refer to the VectorNav IMU specifications page.\u00a0\u21a9

    "},{"location":"design/autoware-architecture/map/","title":"Map component design","text":""},{"location":"design/autoware-architecture/map/#map-component-design","title":"Map component design","text":""},{"location":"design/autoware-architecture/map/#1-overview","title":"1. Overview","text":"

    Autoware relies on high-definition point cloud maps and vector maps of the driving environment to perform various tasks such as localization, route planning, traffic light detection, and predicting the trajectories of pedestrians and other vehicles.

    This document describes the design of map component of Autoware, including its requirements, architecture design, features, data formats, and interface to distribute map information to the rest of autonomous driving stack.

    "},{"location":"design/autoware-architecture/map/#2-requirements","title":"2. Requirements","text":"

    Map should provide two types of information to the rest of the stack:

    A vector map contains highly accurate information about a road network, lane geometry, and traffic lights. It is required for route planning, traffic light detection, and predicting the trajectories of other vehicles and pedestrians.

    A 3D point cloud map is primarily used for LiDAR-based localization and part of perception in Autoware. In order to determine the current position and orientation of the vehicle, a live scan captured from one or more LiDAR units is matched against a pre-generated 3D point cloud map. Therefore, an accurate point cloud map is crucial for good localization results. However, if the vehicle has an alternate localization method with enough accuracy, for example using camera-based localization, point cloud map may not be required to use Autoware.

    In addition to above two types of maps, Autoware also requires a supplemental file for specifying the coordinate system of the map in geodetic system.

    "},{"location":"design/autoware-architecture/map/#3-architecture","title":"3. Architecture","text":"

    This diagram describes the high-level architecture of Map component in Autoware.

    The Map component consists of the following sub-components:

    "},{"location":"design/autoware-architecture/map/#4-component-interface","title":"4. Component interface","text":""},{"location":"design/autoware-architecture/map/#input-to-the-map-component","title":"Input to the map component","text":""},{"location":"design/autoware-architecture/map/#output-from-the-map-component","title":"Output from the map component","text":""},{"location":"design/autoware-architecture/map/#5-map-specification","title":"5. Map Specification","text":""},{"location":"design/autoware-architecture/map/#point-cloud-map","title":"Point Cloud Map","text":"

    The point cloud map must be supplied as a file with the following requirements:

    For more details on divided map format, please refer to the readme of map_loader in Autoware Universe.

    Note

    Three global coordinate systems are currently supported by Autoware, including Military Grid Reference System (MGRS), Universal Transverse Mercator (UTM), and Japan Rectangular Coordinate System. However, MGRS is a preferred coordinate system for georeferenced maps. In a map with MGRS coordinate system, the X and Y coordinates of each point represent the point's location within the 100,000-meter square, while the Z coordinate represents the point's elevation.

    "},{"location":"design/autoware-architecture/map/#vector-map","title":"Vector Map","text":"

    The vector map must be supplied as a file with the following requirements:

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/map/#projection-information","title":"Projection Information","text":"

    The projection information must be supplied as a file with the following requirements:

    For further information, please refer to the readme of map_projection_loader in Autoware Universe.

    "},{"location":"design/autoware-architecture/node-diagram/","title":"Node diagram","text":""},{"location":"design/autoware-architecture/node-diagram/#node-diagram","title":"Node diagram","text":"

    This page depicts the node diagram designs for Autoware Core/Universe architecture.

    "},{"location":"design/autoware-architecture/node-diagram/#autoware-core","title":"Autoware Core","text":"

    TBD.

    "},{"location":"design/autoware-architecture/node-diagram/#autoware-universe","title":"Autoware Universe","text":"

    Open in draw.io for fullscreen

    Note that the diagram is for reference. We are planning to update this diagram every release and may have old information between the releases. If you wish to check the latest node diagram, use rqt_graph after launching Autoware.

    "},{"location":"design/autoware-architecture/perception/","title":"Perception component design","text":""},{"location":"design/autoware-architecture/perception/#perception-component-design","title":"Perception component design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/planning/","title":"Planning component design","text":""},{"location":"design/autoware-architecture/planning/#planning-component-design","title":"Planning component design","text":""},{"location":"design/autoware-architecture/planning/#overview","title":"Overview","text":"

    The Planning component generates the trajectory message that will be subscribed to by the Control component based on the environmental state obtained from the Localization and the Perception components.

    "},{"location":"design/autoware-architecture/planning/#requirements","title":"Requirements","text":"

    The goal of the Planning component is to generate a trajectory (path and velocity) of the ego vehicle that is safe and well-regulated while satisfying the given mission.

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/planning/#high-level-architecture","title":"High-level architecture","text":"

    This diagram describes the high-level architecture of the Planning Component.

    The Planning component consists of the following sub-components:

    Each component contains some modules that can be dynamically loaded and unloaded based on the situation. For instance, the Behavior Planning component includes modules such as lane change, intersection, and crosswalk modules.

    Our planning components are built based on the microautonomy architecture with Autoware. We adopt a modular system framework where the tasks are implemented as modules that can be dynamically loaded and unloaded to achieve different features depending on the given use cases.

    "},{"location":"design/autoware-architecture/planning/#component-interface","title":"Component interface","text":"

    This section describes the inputs and outputs of the Planning Component and of its internal modules. See the Planning Component Interface page for the current implementation.

    "},{"location":"design/autoware-architecture/planning/#input-to-the-planning-component","title":"Input to the planning component","text":""},{"location":"design/autoware-architecture/planning/#output-from-the-planning-component","title":"Output from the planning component","text":""},{"location":"design/autoware-architecture/planning/#internal-interface-in-the-planning-component","title":"Internal interface in the planning component","text":""},{"location":"design/autoware-architecture/planning/#how-to-add-new-modules-wip","title":"How to add new modules (WIP)","text":"

    As mentioned in the goals section, this planning module is designed to be extensible by third-party components. For specific instructions on how to add new modules and expand its functionality, please refer to the provided documentation or guidelines (WIP).

    "},{"location":"design/autoware-architecture/planning/#supported-functions","title":"Supported Functions","text":"Feature Description Requirements Figure Route Planning Plan route from the ego vehicle position to the destination. Reference implementation is in Mission Planner, enabled by launching the mission_planner node. - Lanelet map (driving lanelets) Path Planning from Route Plan path to be followed from the given route. Reference implementation is in Behavior Path Planner. - Lanelet map (driving lanelets) Obstacle Avoidance Plan path to avoid obstacles by steering operation. Reference implementation is in Avoidance, Obstacle Avoidance Planner. Enable flag in parameter: launch obstacle_avoidance_planner true Demonstration Video - objects information Path Smoothing Plan path to achieve smooth steering. Reference implementation is in Obstacle Avoidance Planner. Demonstration Video - Lanelet map (driving lanelet) Narrow Space Driving Plan path to drive within the drivable area. Furthermore, when it is not possible to drive within the drivable area, stop the vehicle to avoid exiting the drivable area. Reference implementation is in Obstacle Avoidance Planner. Demonstration Video - Lanelet map (high-precision lane boundaries) Lane Change Plan path for lane change to reach the destination. Reference implementation is in Lane Change. Demonstration Video - Lanelet map (driving lanelets) Pull Over Plan path for pull over to park at the road shoulder. Reference implementation is in Goal Planner. Demonstration Videos: Simple Pull Over Arc Forward Pull Over Arc Backward Pull Over - Lanelet map (shoulder lane) Pull Out Plan path for pull over to start from the road shoulder. Reference implementation is in Pull Out Module. Demonstration Video: Simple Pull Out Backward Pull Out - Lanelet map (shoulder lane) Path Shift Plan path in lateral direction in response to external instructions. Reference implementation is in Side Shift Module. 
- None Obstacle Stop Plan velocity to stop for an obstacle on the path. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. launch obstacle_stop_planner and enable flag: TODO, launch obstacle_cruise_planner and enable flag: TODO Demonstration Video - objects information Obstacle Deceleration Plan velocity to decelerate for an obstacle located around the path. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. Demonstration Video - objects information Adaptive Cruise Control Plan velocity to follow the vehicle driving in front of the ego vehicle. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. - objects information Decelerate for cut-in vehicles Plan velocity to avoid a risk for cutting-in vehicle to ego lane. Reference implementation is in Obstacle Cruise Planner. - objects information Surround Check at starting Plan velocity to prevent moving when an obstacle exists around the vehicle. Reference implementation is in Surround Obstacle Checker. Enable flag in parameter: use_surround_obstacle_check true in tier4_planning_component.launch.xml Demonstration Video - objects information Curve Deceleration Plan velocity to decelerate the speed on a curve. Reference implementation is in Motion Velocity Smoother. - None Curve Deceleration for Obstacle Plan velocity to decelerate the speed on a curve for a risk of obstacle collision around the path. Reference implementation is in Obstacle Velocity Limiter. Demonstration Video - objects information - Lanelet map (static obstacle) Crosswalk Plan velocity to stop or decelerate for pedestrians approaching or walking on a crosswalk. Reference implementation is in Crosswalk Module. Demonstration Video - objects information - Lanelet map (pedestrian crossing) Intersection Oncoming Vehicle Check Plan velocity for turning right/left at intersection to avoid a risk with oncoming other vehicles. Reference implementation is in Intersection Module. 
Demonstration Video - objects information - Lanelet map (intersection lane and yield lane) Intersection Blind Spot Check Plan velocity for turning right/left at intersection to avoid a risk with other vehicles or motorcycles coming from behind blind spot. Reference implementation is in Blind Spot Module. Demonstration Video - objects information - Lanelet map (intersection lane) Intersection Occlusion Check Plan velocity for turning right/left at intersection to avoid a risk with the possibility of coming vehicles from occlusion area. Reference implementation is in Intersection Module. Demonstration Video - objects information - Lanelet map (intersection lane) Intersection Traffic Jam Detection Plan velocity for intersection not to enter the intersection when a vehicle is stopped ahead for a traffic jam. Reference implementation is in Intersection Module. Demonstration Video - objects information - Lanelet map (intersection lane) Traffic Light Plan velocity for intersection according to a traffic light signal. Reference implementation is in Traffic Light Module. Demonstration Video - Traffic light color information Run-out Check Plan velocity to decelerate for the possibility of nearby objects running out into the path. Reference implementation is in Run Out Module. Demonstration Video - objects information Stop Line Plan velocity to stop at a stop line. Reference implementation is in Stop Line Module. Demonstration Video - Lanelet map (stop line) Occlusion Spot Check Plan velocity to decelerate for objects running out from occlusion area, for example, from behind a large vehicle. Reference implementation is in Occlusion Spot Module. Demonstration Video - objects information - Lanelet map (private/public lane) No Stop Area Plan velocity not to stop in areas where stopping is prohibited, such as in front of the fire station entrance. Reference implementation is in No Stopping Area Module. 
- Lanelet map (no stopping area) Merge from Private Area to Public Road Plan velocity for entering the public road from a private driveway to avoid a risk of collision with pedestrians or other vehicles. Reference implementation is in Merge from Private Area Module. - objects information - Lanelet map (private/public lane) WIP Speed Bump Plan velocity to decelerate for speed bumps. Reference implementation is in Speed Bump Module. Demonstration Video - Lanelet map (speed bump) Detection Area Plan velocity to stop at the corresponding stop when an object exist in the designated detection area. Reference implementation is in Detection Area Module. Demonstration Video - Lanelet map (detection area) No Drivable Lane Plan velocity to stop before exiting the area designated by ODD (Operational Design Domain) or stop the vehicle if autonomous mode started in out of ODD lane. Reference implementation is in No Drivable Lane Module. - Lanelet map (no drivable lane) Collision Detection when deviating from lane Plan velocity to avoid conflict with other vehicles driving in the another lane when the ego vehicle is deviating from own lane. Reference implementation is in Out of Lane Module. - objects information - Lanelet map (driving lane) WIP Parking Plan path and velocity for given goal in parking area. Reference implementation is in Free Space Planner. Demonstration Video - objects information - Lanelet map (parking area) Autonomous Emergency Braking (AEB) Perform an emergency stop if a collision with an object ahead is anticipated. It is noted that this function is expected as a final safety layer, and this should work even in the event of failures in the Localization or Perception system. Reference implementation is in Out of Lane Module. - Primitive objects Minimum Risk Maneuver (MRM) Provide appropriate MRM (Minimum Risk Maneuver) instructions when a hazardous event occurs. 
For example, when a sensor trouble found, send an instruction for emergency braking, moderate stop, or pulling over to the shoulder, depending on the severity of the situation. Reference implementation is in TODO - TODO WIP Trajectory Validation Check the planned trajectory is safe. If it is unsafe, take appropriate action, such as modify the trajectory, stop sending the trajectory or report to the autonomous driving system. Reference implementation is in Planning Validator. - None Running Lane Map Generation Generate lane map from localization data recorded in manual driving. Reference implementation is in WIP - None WIP Running Lane Optimization Optimize the centerline (reference path) of the map to make it smooth considering the vehicle kinematics. Reference implementation is in Static Centerline Optimizer. - Lanelet map (driving lanes) WIP"},{"location":"design/autoware-architecture/planning/#reference-implementation","title":"Reference Implementation","text":"

    The following diagram describes the reference implementation of the Planning component. By adding new modules or extending the functionalities, various ODDs can be supported.

    Note that some implementation does not adhere to the high-level architecture design and require updating.

    For more details, please refer to the design documents in each package.

    "},{"location":"design/autoware-architecture/planning/#important-parameters","title":"Important Parameters","text":"Package Parameter Type Description obstacle_stop_planner stop_planner.stop_position.max_longitudinal_margin double distance between the ego and the front vehicle when stopping (when cruise_planner_type:=obstacle_stop_planner) obstacle_cruise_planner common.safe_distance_margin double distance between the ego and the front vehicle when stopping (when cruise_planner_type:=obstacle_cruise_planner) behavior_path_planner avoidance.avoidance.lateral.lateral_collision_margin double minimum lateral margin to obstacle on avoidance behavior_path_planner avoidance.avoidance.lateral.lateral_collision_safety_buffer double additional lateral margin to obstacle if possible on avoidance obstacle_avoidance_planner option.enable_outside_drivable_area_stop bool If set true, a stop point will be inserted before the path footprint is outside the drivable area."},{"location":"design/autoware-architecture/planning/#notation","title":"Notation","text":""},{"location":"design/autoware-architecture/planning/#1-self-crossing-road-and-overlapped","title":"[1] self-crossing road and overlapped","text":"

    To support the self-crossing road and overlapped road in the opposite direction, each planning module has to meet the specifications

    Currently, the supported modules are as follows.

    "},{"location":"design/autoware-architecture/planning/#2-size-of-path-points","title":"[2] Size of Path Points","text":"

    Some functions do not support paths with only one point. Therefore, each module should generate a path with more than two path points.

    "},{"location":"design/autoware-architecture/sensing/","title":"Sensing component design","text":""},{"location":"design/autoware-architecture/sensing/#sensing-component-design","title":"Sensing component design","text":""},{"location":"design/autoware-architecture/sensing/#overview","title":"Overview","text":"

    Sensing component is a collection of modules that apply some primitive pre-processing to the raw sensor data.

    The sensor input formats are defined in this component.

    "},{"location":"design/autoware-architecture/sensing/#role","title":"Role","text":""},{"location":"design/autoware-architecture/sensing/#inputs","title":"Inputs","text":""},{"location":"design/autoware-architecture/sensing/#input-types","title":"Input types","text":"Sensor Data Message Type Point cloud (Lidars, depth cameras, etc.) sensor_msgs/msg/PointCloud2.msg Image (RGB, monochrome, depth, etc. cameras) sensor_msgs/msg/Image.msg Radar scan radar_msgs/msg/RadarScan.msg Radar tracks radar_msgs/msg/RadarTracks.msg GNSS-INS position sensor_msgs/msg/NavSatFix.msg GNSS-INS orientation autoware_sensing_msgs/GnssInsOrientationStamped.msg GNSS-INS velocity geometry_msgs/msg/TwistWithCovarianceStamped.msg GNSS-INS acceleration geometry_msgs/msg/AccelWithCovarianceStamped.msg Ultrasonics sensor_msgs/msg/Range.msg"},{"location":"design/autoware-architecture/sensing/#design-by-data-types","title":"Design by data-types","text":""},{"location":"design/autoware-architecture/sensing/data-types/gnss-ins-data/","title":"GNSS/INS data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/gnss-ins-data/#gnssins-data-pre-processing-design","title":"GNSS/INS data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/image/","title":"Image pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/image/#image-pre-processing-design","title":"Image pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/","title":"Point cloud pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-pre-processing-design","title":"Point cloud pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#overview","title":"Overview","text":"

    Point cloud pre-processing is a collection of modules that apply some primitive pre-processing to the raw sensor data.

    This pipeline covers the flow of data from drivers to the perception stack.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#recommended-processing-pipeline","title":"Recommended processing pipeline","text":"
    graph TD\n    Driver[\"Lidar Driver\"] -->|\"Cloud XYZIRCADT\"| FilterPR[\"Polygon Remover Filter / CropBox Filter\"]\n\n    subgraph \"sensing\"\n    FilterPR -->|\"Cloud XYZIRCADT\"| FilterDC[\"Motion Distortion Corrector Filter\"]\n    FilterDC -->|\"Cloud XYZIRCAD\"| FilterOF[\"Outlier Remover Filter\"]\n    FilterOF -->|\"Cloud XYZIRC\"| FilterDS[\"Downsampler Filter\"]\n    FilterDS -->|\"Cloud XYZIRC\"| FilterTrans[\"Cloud Transformer\"]\n    FilterTrans -->|\"Cloud XYZIRC\"| FilterC\n\n    FilterX[\"...\"] -->|\"Cloud XYZIRC (i)\"| FilterC[\"Cloud Concatenator\"]\n    end\n\n    FilterC -->|\"Cloud XYZIRC\"| SegGr[\"Ground Segmentation\"]
    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#list-of-modules","title":"List of modules","text":"

    The modules used here are from pointcloud_preprocessor package.

    For details about the modules, see the following table.

    It is recommended that these modules are used in a single container as components. For details see ROS 2 Composition

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-fields","title":"Point cloud fields","text":"

    In the ideal case, the driver is expected to output a point cloud with the PointXYZIRCADT point type.

    name datatype derived description X FLOAT32 false X position Y FLOAT32 false Y position Z FLOAT32 false Z position I (intensity) UINT8 false Measured reflectivity, intensity of the point R (return type) UINT8 false Laser return type for dual return lidars C (channel) UINT16 false Vertical channel id of the laser that measured the point A (azimuth) FLOAT32 true atan2(Y, X), Horizontal angle from the front of the lidar to the point D (distance) FLOAT32 true hypot(X, Y, Z), Euclidean distance of the point to lidar T (time) UINT32 false Nanoseconds passed since the time of the header when this point was measured

    Note

    A (azimuth) and D (distance) fields are derived fields. They are provided by the driver to reduce the computational load on some parts of the perception stack.

    Note

    If the Motion Distortion Corrector Filter won't be used, the T (time) field can be omitted, PointXYZIRCAD point type can be used.

    Warning

    Autoware will support conversion from PointXYZI to PointXYZIRC or PointXYZIRCAD (with channel and return set to 0) for prototyping purposes. However, this conversion is not recommended for production use since it's not efficient.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#intensity","title":"Intensity","text":"

    We will use following ranges for intensity, compatible with the VLP16 User Manual:

    Quoting from the VLP-16 User Manual:

    For each laser measurement, a reflectivity byte is returned in addition to distance. Reflectivity byte values are segmented into two ranges, allowing software to distinguish diffuse reflectors (e.g. tree trunks, clothing) in the low range from retroreflectors (e.g. road signs, license plates) in the high range. A retroreflector reflects light back to its source with a minimum of scattering. The VLP-16 provides its own light, with negligible separation between transmitting laser and receiving detector, so retroreflecting surfaces pop with reflected IR light compared to diffuse reflectors that tend to scatter reflected energy.

    In a typical point cloud without retroreflectors, all intensity points will be between 0 and 100.

    Retroreflective Gradient road sign, Image Source

    But in a point cloud with retroreflectors, the intensity points will be between 0 and 255.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#intensity-mapping-for-other-lidar-brands","title":"Intensity mapping for other lidar brands","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#hesai-pandarxt16","title":"Hesai PandarXT16","text":"

    Hesai Pandar XT16 User Manual

    This lidar has 2 modes for reporting reflectivity:

    If you are using linear mapping mode, you should map from [0, 255] to [0, 100] when constructing the point cloud.

    If you are using non-linear mapping mode, you should map (hesai to autoware)

    when constructing the point cloud.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#livox-mid-70","title":"Livox Mid-70","text":"

    Livox Mid-70 User Manual

    This lidar has 2 modes for reporting reflectivity similar to Velodyne VLP-16, only the ranges are slightly different.

    You should map (livox to autoware)

    when constructing the point cloud.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#robosense-rs-lidar-16","title":"RoboSense RS-LiDAR-16","text":"

    RoboSense RS-LiDAR-16 User Manual

    No mapping required, same as Velodyne VLP-16.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#ouster-os-1-64","title":"Ouster OS-1-64","text":"

    Software User Manual v2.0.0 for all Ouster sensors

    In the manual it is stated:

    Reflectivity [16 bit unsigned int] - sensor Signal Photons measurements are scaled based on measured range and sensor sensitivity at that range, providing an indication of target reflectivity. Calibration of this measurement has not currently been rigorously implemented, but this will be updated in a future firmware release.

    So it is advised to map the 16 bit reflectivity to [0, 100] range.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#leishen-ch64w","title":"Leishen CH64W","text":"

    I couldn't get the English user manual, but here is a link to the website

    In a user manual I was able to find, it says:

    Byte 7 represents echo strength, and the value range is 0-255. (Echo strength can reflect the energy reflection characteristics of the measured object in the actual measurement environment. Therefore, the echo strength can be used to distinguish objects with different reflection characteristics.)

    So it is advised to map the [0, 255] to [0, 100] range.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#return-type","title":"Return type","text":"

    Various lidars support multiple return modes. Velodyne lidars support Strongest and Last return modes.

    In the PointXYZIRCT and PointXYZIRC types, R field represents return mode with an UINT8.

    R (return type) Description 0 Unknown / Not Marked 1 Strongest 2 Last"},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#channel","title":"Channel","text":"

    The channel field is used to identify the vertical channel of the laser that measured the point. In various lidar manuals or literature, it can also be called laser id, ring, laser line.

    For Velodyne VLP-16, there are 16 channels. Default order of channels in drivers are generally in firing order.

    In the PointXYZIRCT and PointXYZIRC types, C field represents the vertical channel id with an UINT16.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#solid-state-and-petal-pattern-lidars","title":"Solid state and petal pattern lidars","text":"

    Warning

    This section is subject to change. Following are suggestions and open for discussion.

    For solid state lidars that have lines, assign row number as the channel id.

    For petal pattern lidars, you can keep channel 0.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#time","title":"Time","text":"

    In lidar point clouds, each point measurement can have its individual time stamp. This information can be used to eliminate the motion blur that is caused by the movement of the lidar during the scan.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-header-time","title":"Point cloud header time","text":"

    The header contains a Time field. The time field has 2 components:

    Field Type Description sec int32 Unix time (seconds elapsed since January 1, 1970) nanosec uint32 Nanoseconds elapsed since the sec field

    The header of the point cloud message is expected to have the time of the earliest point it has.

    Note

    The sec field is int32 in ROS 2 Humble. The largest value it can represent is 2^31 - 1 seconds, so it is subject to the year 2038 problem. We will wait for actions on the ROS 2 community side.

    More info at: https://github.com/ros2/rcl_interfaces/issues/85

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#individual-point-time","title":"Individual point time","text":"

    Each PointXYZIRCT point type has the T field for representing the nanoseconds passed since the first-shot point of the point cloud.

    To calculate exact time each point was shot, the T nanoseconds are added to the header time.

    Note

    The T field is uint32 type. The largest value it can represent is 2^32 nanoseconds, which equates to roughly 4.29 seconds. Usual point clouds don't last more than 100ms for full cycle. So this field should be enough.

    "},{"location":"design/autoware-architecture/sensing/data-types/radar-data/","title":"Radar data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/radar-data/#radar-data-pre-processing-design","title":"Radar data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/ultrasonics-data/","title":"Ultrasonics data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/ultrasonics-data/#ultrasonics-data-pre-processing-design","title":"Ultrasonics data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/vehicle/","title":"Vehicle Interface design","text":""},{"location":"design/autoware-architecture/vehicle/#vehicle-interface-design","title":"Vehicle Interface design","text":""},{"location":"design/autoware-architecture/vehicle/#abstract","title":"Abstract","text":"

    The Vehicle Interface component provides an interface between Autoware and a vehicle that passes control signals to the vehicle\u2019s drive-by-wire system and receives vehicle information that is passed back to Autoware.

    "},{"location":"design/autoware-architecture/vehicle/#1-requirements","title":"1. Requirements","text":"

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/vehicle/#2-architecture","title":"2. Architecture","text":"

    The Vehicle Interface component consists of the following components:

    Each component contains static nodes of Autoware, while each module can be dynamically loaded and unloaded (corresponding to C++ classes). The mechanism of the Vehicle Interface component is depicted by the following figures:

    "},{"location":"design/autoware-architecture/vehicle/#3-features","title":"3. Features","text":"

    The Vehicle Interface component can provide the following features in functionality and capability:

    Additional functionality and capability features may be added, depending on the vehicle hardware. Some example features are listed below:

    "},{"location":"design/autoware-architecture/vehicle/#4-interface-and-data-structure","title":"4. Interface and Data Structure","text":"

    The interface of the Vehicle Interface component for other components running in the same process space to access the functionality and capability of the Vehicle Interface component is defined as follows.

    From Control

    From Planning

    From the vehicle

    The output interface of the Vehicle Interface component:

    The data structure for the internal representation of semantics for the objects and trajectories used in the Vehicle Interface component is defined as follows:

    "},{"location":"design/autoware-architecture/vehicle/#5-concerns-assumptions-and-limitations","title":"5. Concerns, Assumptions, and Limitations","text":"

    Concerns

    Assumptions

    -

    Limitations

    "},{"location":"design/autoware-architecture/vehicle/#6-examples-of-accuracy-requirements-by-odd","title":"6. Examples of accuracy requirements by ODD","text":""},{"location":"design/autoware-concepts/","title":"Autoware concepts","text":""},{"location":"design/autoware-concepts/#autoware-concepts","title":"Autoware concepts","text":"

    Autoware is the world\u2019s first open-source software for autonomous driving systems. Autoware provides value for both developers and operators of autonomous driving systems. The technology developers of autonomous driving systems can create new components based on Autoware. The service operators of autonomous driving systems, on the other hand, can select appropriate technology components with Autoware. This is enabled by the microautonomy architecture that modularizes its software stack into the core and universe subsystems (modules).

    "},{"location":"design/autoware-concepts/#microautonomy-architecture","title":"Microautonomy architecture","text":"

    Autoware uses a pipeline architecture to enable the development of autonomous driving systems. The pipeline architecture used in Autoware consists of components similar to three-layer-architecture. And they run in parallel. There are 2 main modules: the Core and the Universe. The components in these modules are designed to be extensible and reusable. And we call it microautonomy architecture.

    "},{"location":"design/autoware-concepts/#the-core-module","title":"The Core module","text":"

    The Core module contains basic runtimes and technology components that satisfy the basic functionality and capability of sensing, computing, and actuation required for autonomous driving systems. AWF develops and maintains the Core module with their architects and leading members through their working groups. Anyone can contribute to the Core but the PR(Pull Request) acceptance criteria is more strict compared to the Universe.

    "},{"location":"design/autoware-concepts/#the-universe-module","title":"The Universe module","text":"

    The Universe modules are extensions to the Core module that can be provided by the technology developers to enhance the functionality and capability of sensing, computing, and actuation. AWF provides the base Universe module to extend from. A key feature of the microautonomy architecture is that the Universe modules can be contributed to by any organization and individual. That is, you can even create your Universe and make it available for the Autoware community and ecosystem. AWF is responsible for quality control of the Universe modules through their development process. As a result, there are multiple types of the Universe modules - some are verified and validated by AWF and others are not. It is up to the users of Autoware which Universe modules are selected and integrated to build their end applications.

    "},{"location":"design/autoware-concepts/#interface-design","title":"Interface design","text":"

    The interface design is the most essential piece of the microautonomy architecture, which is classified into internal and external interfaces. The component interface is designed for the components in a Universe module to communicate with those in other modules, including the Core module, within Autoware internally. The AD(Autonomous Driving) API, on the other hand, is designed for the applications of Autoware to access the technology components in the Core and Universe modules of Autoware externally. Designing solid interfaces, the microautonomy architecture is made possible with AWF's partners, and at the same time is made feasible for the partners.

    "},{"location":"design/autoware-concepts/#challenges","title":"Challenges","text":"

    A grand challenge of the microautonomy architecture is to achieve real-time capability, which guarantees all the technology components activated in the system to predictably meet timing constraints (given deadlines). In general, it is difficult, if not impossible, to tightly estimate the worst-case execution times (WCETs) of components.

    In addition, it is also difficult, if not impossible, to tightly estimate the end-to-end latency of components connected by a DAG. Autonomous driving systems based on the microautonomy architecture, therefore, must be designed to be fail-safe but not never-fail. We accept that the timing constraints may be violated (the given deadlines may be missed) as far as the overrun is taken into account. The overrun handlers are two-fold: (i) platform-defined and (ii) user-defined. The platform-defined handler is implemented as part of the platform by default, while the user-defined handler can overwrite it or add a new handler to the system. This is what we call \u201cfail-safe\u201d on a timely basis.

    "},{"location":"design/autoware-concepts/#requirements-and-roadmap","title":"Requirements and roadmap","text":"

    Goals:

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/","title":"How is Autoware Core/Universe different from Autoware.AI and Autoware.Auto?","text":""},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#how-is-autoware-coreuniverse-different-from-autowareai-and-autowareauto","title":"How is Autoware Core/Universe different from Autoware.AI and Autoware.Auto?","text":"

    Autoware is the world's first \"all-in-one\" open-source software for self-driving vehicles. Since it was first released in 2015, there have been multiple releases made with differing underlying concepts, each one aimed at improving the software.

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autowareai","title":"Autoware.AI","text":"

    Autoware.AI is the first distribution of Autoware that was released based on ROS 1. The repository contains a variety of packages covering different aspects of autonomous driving technologies - sensing, actuation, localization, mapping, perception and planning.

    While it was successful in attracting many developers and contributions, it was difficult to improve Autoware.AI's capabilities for a number of reasons:

    Furthermore, there was no clear definition of the conditions under which an Autoware-enabled autonomous vehicle could operate, nor of the use cases or situations supported (eg: the ability to overtake a stationary vehicle).

    From the lessons learned from Autoware.AI development, a different development process was taken for Autoware.Auto to develop a ROS 2 version of Autoware.

    Warning

    Autoware.AI is currently in maintenance mode and will reach end-of-life at the end of 2022.

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autowareauto","title":"Autoware.Auto","text":"

    Autoware.Auto is the second distribution of Autoware that was released based on ROS 2. As part of the transition to ROS 2, it was decided to avoid simply porting Autoware.AI from ROS 1 to ROS 2. Instead, the codebase was rewritten from scratch with proper engineering practices, including defining target use cases and ODDs (eg: Autonomous Valet Parking [AVP], Cargo Delivery, etc.), designing a proper architecture, writing design documents and test code.

    Autoware.Auto development seemed to work fine initially, but after completing the AVP and Cargo Delivery ODD projects, we started to see the following issues:

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autoware-coreuniverse","title":"Autoware Core/Universe","text":"

    In order to address the issues with Autoware.Auto development, the Autoware Foundation decided to create a new architecture called Autoware Core/Universe.

    Autoware Core carries over the original policy of Autoware.Auto to be a stable and well-tested codebase. Alongside Autoware Core is a new concept called Autoware Universe, which acts as an extension of Autoware Core with the following benefits:

    This way, the primary requirement of having a stable and safe autonomous driving system can be achieved, whilst simultaneously enabling access to state-of-the-art features created by third-party contributors. For more details about the design of Autoware Core/Universe, refer to the Autoware concepts documentation page.

    "},{"location":"design/autoware-interfaces/","title":"Autoware interface design","text":""},{"location":"design/autoware-interfaces/#autoware-interface-design","title":"Autoware interface design","text":""},{"location":"design/autoware-interfaces/#abstract","title":"Abstract","text":"

    Autoware defines three categories of interfaces. The first one is Autoware AD API for operating the vehicle from outside the autonomous driving system such as the Fleet Management System (FMS) and Human Machine Interface (HMI) for operators or passengers. The second one is Autoware component interface for components to communicate with each other. The last one is the local interface used inside the component.

    "},{"location":"design/autoware-interfaces/#concept","title":"Concept","text":" "},{"location":"design/autoware-interfaces/#requirements","title":"Requirements","text":"

    Goals:

    Non-goals:

    "},{"location":"design/autoware-interfaces/#architecture","title":"Architecture","text":"

    The components of Autoware are connected via the component interface. Each component uses the interface to provide functionality and to access other components. AD API implementation is also a component. Since the functional elements required for AD API are defined as the component interface, other components do not need to consider AD API directly. Tools for evaluation and debugging, such as simulators, access both AD API and the component interface.

    The component interface has a hierarchical specification. The top-level architecture consists of some components. Each component has some options of the next-level architecture. Developers select one of them when implementing the component. The simplest next-level architecture is monolithic. This is an all-in-one and black box implementation, and is suitable for small group development, prototyping, and very complex functions. Others are arbitrary architectures that consist of sub-components and have advantages for large group development. A sub-component can be combined with others that adopt the same architecture. Third parties can define and publish their own architecture and interface for open source development. It is desirable to propose them for standardization if they are sufficiently evaluated.

    "},{"location":"design/autoware-interfaces/#features","title":"Features","text":""},{"location":"design/autoware-interfaces/#communication-methods","title":"Communication methods","text":"

    As shown in the table below, interfaces are classified into four communication methods to define their behavior. Function Call is a request-response communication and is used for processing that requires immediate results. The others are publish-subscribe communication. Notification is used to process data that changes with some event, typically a callback. Streams handle continuously changing data. Reliable Stream expects all data to arrive without loss, Realtime Stream expects the latest data to arrive with low delay.

    Communication Method ROS Implementation Optional Implementation Function Call Service HTTP Notification Topic (reliable, transient_local) MQTT (QoS=2, retain) Reliable Stream Topic (reliable, volatile) MQTT (QoS=2) Realtime Stream Topic (best_effort, volatile) MQTT (QoS=0)

    These methods are provided as services or topics of ROS since Autoware is developed using ROS and mainly communicates with its packages. On the other hand, FMS and HMI are often implemented without ROS, so Autoware is also expected to communicate with applications that do not use ROS. It is wasteful for each of these applications to have an adapter for Autoware, and a more suitable means of communication is required. HTTP and MQTT are suggested as additional options because these protocols are widely used and can substitute the behavior of services and topics. In that case, text formats such as JSON, where field names are repeated in an array of objects, are inefficient and it is necessary to consider the serialization.

    "},{"location":"design/autoware-interfaces/#naming-convention","title":"Naming convention","text":"

    The name of the interface must be /<component name>/api/<interface name>, where <component name> is the name of the component. For an AD API component, omit this part and start with /api. The <interface name> is an arbitrary string separated by slashes. Note that this rule causes a restriction that the namespace api must not be used as a name other than AD API and the component interface.

    The following are examples of correct interface names for AD API and the component interface:

    The following are examples of incorrect interface names for AD API and the component interface:

    "},{"location":"design/autoware-interfaces/#logging","title":"Logging","text":"

    It is recommended to log the interface for analysis of vehicle behavior. If logging is needed, rosbag is available for topics, and use logger in rclcpp or rclpy for services. Typically, create a wrapper for service and client classes that logs when a service is called.

    "},{"location":"design/autoware-interfaces/#restrictions","title":"Restrictions","text":"

    For each API, consider the restrictions such as following and describe them if necessary.

    Services:

    Topics:

    "},{"location":"design/autoware-interfaces/#data-structure","title":"Data structure","text":""},{"location":"design/autoware-interfaces/#data-type-definition","title":"Data type definition","text":"

    Do not share the types in AD API unless they are obviously the same to avoid changes in one API affecting another. Also, implementation-dependent types, including the component interface, should not be used in AD API for the same reason. Use the type in AD API in implementation, or create the same type and copy the data to convert the type.

    "},{"location":"design/autoware-interfaces/#constants-and-enumeration","title":"Constants and enumeration","text":"

    Since ROS doesn't support enumeration, use constants instead. The default value of a type, such as zero or an empty string, should not be used to detect that a variable is unassigned. Alternatively, assign it a dedicated name to indicate that it is undefined. If one type has multiple enumerations, comment on the correspondence between constants and variables. Do not use enumeration values directly, as assignments are subject to change when the version is updated.

    "},{"location":"design/autoware-interfaces/#time-stamp","title":"Time stamp","text":"

    Clarify what the timestamp indicates. For example, send time, measurement time, update time, etc. Consider having multiple timestamps if necessary. Use std_msgs/msg/Header when using ROS transform. Also consider whether the header is common to all data, independent for each data, or an additional timestamp is required.

    "},{"location":"design/autoware-interfaces/#request-header","title":"Request header","text":"

    Currently, there is no required header.

    "},{"location":"design/autoware-interfaces/#response-status","title":"Response status","text":"

    The interfaces whose communication method is Function Call use a common response status to unify the error format. These interfaces should include a variable of ResponseStatus with the name status in the response. See autoware_adapi_v1_msgs/msg/ResponseStatus for details.

    "},{"location":"design/autoware-interfaces/#concerns-assumptions-and-limitations","title":"Concerns, assumptions and limitations","text":""},{"location":"design/autoware-interfaces/ad-api/","title":"Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/#autoware-ad-api","title":"Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/#overview","title":"Overview","text":"

    Autoware AD API is the interface for operating the vehicle from outside the autonomous driving system. See here for the overall interface design of Autoware.

    "},{"location":"design/autoware-interfaces/ad-api/#user-stories","title":"User stories","text":"

    The user stories are service scenarios that AD API assumes. AD API is designed based on these scenarios. Each scenario is realized by a combination of use cases described later. If there are scenarios that cannot be covered, please discuss adding a user story.

    "},{"location":"design/autoware-interfaces/ad-api/#use-cases","title":"Use cases","text":"

    Use cases are partial scenarios derived from the user story and generically designed. Service providers can combine these use cases to define user stories and check if AD API can be applied to their own scenarios.

    "},{"location":"design/autoware-interfaces/ad-api/#features","title":"Features","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/","title":"Cooperation","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#cooperation","title":"Cooperation","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#description","title":"Description","text":"

    Request to cooperate (RTC) is a feature that enables a human operator to support the decision in autonomous driving mode. Autoware usually drives the vehicle using its own decisions, but the operator may prefer to make their decisions in experiments and complex situations.

    The planning component manages each situation that requires a decision as a scene. Each scene has an ID that doesn't change until the scene is completed or canceled. The operator can override the decision of the target scene using this ID. In practice, the user interface application can hide the specification of the ID and provide an abstracted interface to the operator.

    For example, in the situation in the diagram below, the vehicle is expected to make two lane changes and turn left at the intersection. Therefore the planning component generates three scene instances for each required action, and each scene instance will wait for the decision to be made, in this case \"changing or keeping lane\" and \"turning left or waiting at the intersection\". Here Autoware decides not to change lanes a second time due to the obstacle, so the vehicle will stop there. However, the operator could override that decision through the RTC function and force the lane change so that the vehicle could reach its goal. Using RTC, the operator can override these decisions to continue driving the vehicle to the goal.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#architecture","title":"Architecture","text":"

    Modules that support RTC have the operator decision and cooperation policy in addition to the module decision as shown below. These modules use the merged decision that is determined by these values when planning vehicle behavior. See the decisions section for details of these values. The cooperation policy is used when there is no operator decision and has a default value set by the system settings. If the module supports RTC, this information is available in velocity factors or steering factors as cooperation status.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#sequence","title":"Sequence","text":"

    This is an example sequence that overrides the scene decision to force a lane change. It is for the second scene in the diagram in the architecture section. Here let's assume the cooperation policy is set to optional, see the decisions section described later for details.

    1. A planning module creates a scene instance with unique ID when approaching a place where a lane change is needed.
    2. The scene instance generates the module decision from the current situation. In this case, the module decision is not to do a lane change due to the obstacle.
    3. The scene instance generates the merged decision. At this point, there is no operator decision yet, so it is based on the module decision.
    4. The scene instance plans the vehicle to keep the lane according to the merged decision.
    5. The scene instance sends a cooperation status.
    6. The operator receives the cooperation status.
    7. The operator sends a cooperation command to override the module decision and to do a lane change.
    8. The scene instance receives the cooperation command and updates the operator decision.
    9. The scene instance updates the module decision from the current situation.
    10. The scene instance updates the merged decision. It is based on the operator decision received.
    11. The scene instance plans the vehicle to change the lane according to the merged decision.
    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#decisions","title":"Decisions","text":"

    The merged decision is determined by the module decision, operator decision, and cooperation policy, each of which takes the value shown in the table below.

    Status Values merged decision deactivate, activate module decision deactivate, activate operator decision deactivate, activate, autonomous, none cooperation policy required, optional

    The meanings of these values are as follows. Note that the cooperation policy is common per module, so changing it will affect all scenes in the same module.

    Value Description deactivate An operator/module decision to plan vehicle behavior with priority on safety. activate An operator/module decision to plan vehicle behavior with priority on driving. autonomous An operator decision that follows the module decision. none An initial value for operator decision, indicating that there is no operator decision yet. required A policy that requires the operator decision to continue driving. optional A policy that does not require the operator decision to continue driving.

    The following flow is how the merged decision is determined.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#examples","title":"Examples","text":"

    This is an example of cooperation for lane change module. The behaviors by the combination of decisions are as follows.

    Operator decision Policy Module decision Description deactivate - - The operator instructs to keep lane regardless the module decision. So the vehicle keeps the lane by the operator decision. activate - - The operator instructs to change lane regardless the module decision. So the vehicle changes the lane by the operator decision. autonomous - deactivate The operator instructs to follow the module decision. So the vehicle keeps the lane by the module decision. autonomous - activate The operator instructs to follow the module decision. So the vehicle changes the lane by the module decision. none required - The required policy is used because no operator instruction. So the vehicle keeps the lane by the cooperation policy. none optional deactivate The optional policy is used because no operator instruction. So the vehicle keeps the lane by the module decision. none optional activate The optional policy is used because no operator instruction. So the vehicle change the lane by the module decision."},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/","title":"Fail-safe","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#fail-safe","title":"Fail-safe","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#description","title":"Description","text":"

    This API manages the behavior related to the abnormality of the vehicle. It provides the state of Request to Intervene (RTI), Minimal Risk Maneuver (MRM) and Minimal Risk Condition (MRC). As shown below, Autoware has the gate to switch between the command during normal operation and the command during abnormal operation. For safety, Autoware switches the operation to MRM when an abnormality is detected. Since the required behavior differs depending on the situation, MRM is implemented in various places as a specific mode in a normal module or as an independent module. The fail-safe module selects the behavior of MRM according to the abnormality and switches the gate output to that command.

    "},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#states","title":"States","text":"

    The MRM state indicates whether MRM is operating. This state also provides success or failure. Generally, MRM will switch to another behavior if it fails.

    State Description NONE MRM is not operating. OPERATING MRM is operating because an abnormality has been detected. SUCCEEDED MRM succeeded. The vehicle is in a safe condition. FAILED MRM failed. The vehicle is still in an unsafe condition."},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#behavior","title":"Behavior","text":"

    There is a dependency between MRM behaviors. For example, it switches from a comfortable stop to an emergency stop, but not the other way around. This is service dependent. Autoware supports the following transitions by default.

    State Description NONE MRM is not operating or is operating but no special behavior is required. COMFORTABLE_STOP The vehicle will stop quickly with a comfortable deceleration. EMERGENCY_STOP The vehicle will stop immediately with as much deceleration as possible."},{"location":"design/autoware-interfaces/ad-api/features/interface/","title":"Interface","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#interface","title":"Interface","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#description","title":"Description","text":"

    This API provides the interface version of the set of AD APIs. It follows Semantic Versioning in order to provide an intuitive understanding of the changes between versions.

    "},{"location":"design/autoware-interfaces/ad-api/features/localization/","title":"Localization","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#localization","title":"Localization","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#description","title":"Description","text":"

    This API manages the initialization of localization. Autoware requires a global pose as the initial guess for localization.

    "},{"location":"design/autoware-interfaces/ad-api/features/localization/#states","title":"States","text":"State Description UNINITIALIZED Localization is not initialized. Waiting for a global pose as the initial guess. INITIALIZING Localization is initializing. INITIALIZED Localization is initialized. Initialization can be requested again if necessary."},{"location":"design/autoware-interfaces/ad-api/features/motion/","title":"Motion","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#motion","title":"Motion","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#description","title":"Description","text":"

    This API manages the current behavior of the vehicle. Applications can notify the vehicle behavior to the people around and visualize it for operator and passengers.

    "},{"location":"design/autoware-interfaces/ad-api/features/motion/#states","title":"States","text":"

    The motion state manages the stop and start of the vehicle. Once the vehicle has stopped, the state will be STOPPED. After this, when the vehicle tries to start (is still stopped), the state will be STARTING. In this state, calling the start API changes the state to MOVING and the vehicle starts. This mechanism can add processing such as announcements before the vehicle starts. Depending on the configuration, the state may transition directly from STOPPED to MOVING.

    State Description STOPPED The vehicle is stopped. STARTING The vehicle is stopped, but is trying to start. MOVING The vehicle is moving. BRAKING (T.B.D.) The vehicle is decelerating strongly."},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/","title":"Operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#operation-mode","title":"Operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#description","title":"Description","text":"

    As shown below, Autoware assumes that the vehicle interface has two modes, Autoware control and direct control. In direct control mode, the vehicle is operated using devices such as steering and pedals. If the vehicle does not support direct control mode, it is always treated as Autoware control mode. Autoware control mode has four operation modes.

    Mode Description Stop Keep the vehicle stopped. Autonomous Autonomously control the vehicle. Local Manually control the vehicle from nearby with some device such as a joystick. Remote Manually control the vehicle from a web application on the cloud.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#states","title":"States","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#autoware-control-flag","title":"Autoware control flag","text":"

    The flag is_autoware_control_enabled indicates if the vehicle is controlled by Autoware. The enable and disable APIs can be used if the control can be switched by software. These APIs will always fail if the vehicle does not support mode switching or is switched by hardware.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#operation-mode-and-change-flags","title":"Operation mode and change flags","text":"

    The state operation_mode indicates what command is used when Autoware control is enabled. The flags change_to_* can be used to check if it is possible to transition to each mode.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#transition-flag","title":"Transition flag","text":"

    Since Autoware may not be able to guarantee safety in cases such as switching to autonomous mode during overspeed, there is the flag is_in_transition for this situation, and it will be true when changing modes. The operator who changed the mode should ensure safety while this flag is true. The flag will be false when the mode change is complete.

    "},{"location":"design/autoware-interfaces/ad-api/features/perception/","title":"Perception","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#perception","title":"Perception","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#description","title":"Description","text":"

    API for perception related topic.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/","title":"Planning factors","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#planning-factors","title":"Planning factors","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#description","title":"Description","text":"

    This API manages the planned behavior of the vehicle. Applications can notify the vehicle behavior to the people around and visualize it for operator and passengers.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#velocity-factors","title":"Velocity factors","text":"

    The velocity factors is an array of information on the behavior that the vehicle stops or slows down. Each factor has a behavior type which is described below. Some behavior types have sequence and details as additional information.

    Behavior Description surrounding-obstacle There are obstacles immediately around the vehicle. route-obstacle There are obstacles along the route ahead. intersection There are obstacles in other lanes in the path. crosswalk There are obstacles on the crosswalk. rear-check There are obstacles behind that would be in a human driver's blind spot. user-defined-attention-area There are obstacles in the predefined attention area. no-stopping-area There is not enough space beyond the no stopping area. stop-sign A stop by a stop sign. traffic-signal A stop by a traffic signal. v2x-gate-area A stop by a gate area. It has enter and leave as sequences and v2x type as details. merge A stop before merging lanes. sidewalk A stop before crossing the sidewalk. lane-change A lane change. avoidance A path change to avoid an obstacle in the current lane. emergency-operation A stop by emergency instruction from the operator.

    Each factor also provides status, poses in the base link frame, and distance from that pose. As the vehicle approaches the stop position, this factor appears with a status of APPROACHING. And when the vehicle reaches that position and stops, the status will be STOPPED. The pose indicates the stop position, or the base link if the stop position cannot be calculated.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#steering-factors","title":"Steering factors","text":"

    The steering factors is an array of information on the maneuver that requires use of turn indicators, such as turning left or right. Each factor has a behavior type which is described below and steering direction. Some behavior types have sequence and details as additional information.

    Behavior Description intersection A turning left or right at an intersection. lane-change A lane change. avoidance A path change to avoid an obstacle. It has a sequence of change and return. start-planner T.B.D. goal-planner T.B.D. emergency-operation A path change by emergency instruction from the operator.

    Each factor also provides status, poses in the base link frame, and distances from that poses. As the vehicle approaches the position to start steering, this factor appears with a status of APPROACHING. And when the vehicle reaches that position, the status will be TURNING. The poses indicate the start and end position of the section where the status is TURNING.

    In cases such as lane change and avoidance, the vehicle will start steering at any position in the range depending on the situation. For these types, the section where the status is TURNING will be updated dynamically and the poses will follow that.

    "},{"location":"design/autoware-interfaces/ad-api/features/routing/","title":"Routing","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#routing","title":"Routing","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#description","title":"Description","text":"

    This API manages destination and waypoints. Note that waypoints are not like stops and just points passing through. In other words, Autoware does not support the route with multiple stops, the application needs to split it up and switch them. There are two ways to set the route. One is a generic method that uses a pose, the other is map-dependent.

    "},{"location":"design/autoware-interfaces/ad-api/features/routing/#states","title":"States","text":"State Description UNSET The route is not set. Waiting for a route request. SET The route is set. ARRIVED The vehicle has arrived at the destination. CHANGING Trying to change the route. Not implemented yet."},{"location":"design/autoware-interfaces/ad-api/features/routing/#goal-modification","title":"Goal modification","text":"

    Autoware tries to look for an alternate goal when the goal is unreachable (e.g., when there is an obstacle on the given goal). When setting a route from the API, applications can choose whether they allow Autoware to adjust the goal pose in such situations. When set false, Autoware may get stuck until the given goal becomes reachable.

    Option Description allow_goal_modification If true, allow goal modification."},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/","title":"Vehicle doors","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#vehicle-doors","title":"Vehicle doors","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#description","title":"Description","text":"

    This feature is available if the vehicle provides a software interface for the doors. It can be used to create user interfaces for passengers or to control sequences at bus stops.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#layout","title":"Layout","text":"

    Each door in a vehicle is assigned an array index. This assignment is vehicle dependent. The layout API returns this information. The description field is a string to display in the user interface, etc. This is an arbitrary string and is not recommended to use for processing in applications. Use the roles field to know doors for getting on and off. Below is an example of the information returned by the layout API.

    Index Description Roles 0 front right - 1 front left GET_ON 2 rear right GET_OFF 3 rear left GET_ON, GET_OFF"},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#status","title":"Status","text":"

    The status API provides an array of door status. This array order is consistent with the layout API.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#control","title":"Control","text":"

    Use the command API to control doors. Unlike the status and layout APIs, array indices do not correspond to doors. The command has a field to specify the target door index.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/","title":"Vehicle status","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#vehicle-status","title":"Vehicle status","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#kinematics","title":"Kinematics","text":"

    This is an estimate of the vehicle kinematics. The vehicle position is necessary for applications to schedule dispatches. Also, using velocity and acceleration, applications can find vehicles that need operator assistance, such as those that are stuck or brake suddenly.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#status","title":"Status","text":"

    This is the status provided by the vehicle. The indicators and steering are mainly used for visualization and remote control. The remaining energy can be also used for vehicle scheduling.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#dimensions","title":"Dimensions","text":"

    The vehicle dimensions are used to know the actual distance between the vehicle and objects because the vehicle position in kinematics is the coordinates of the base link. This is necessary for visualization when supporting vehicles remotely.

    "},{"location":"design/autoware-interfaces/ad-api/list/","title":"List of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/list/#list-of-autoware-ad-api","title":"List of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/","title":"/api/fail_safe/mrm_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#apifail_safemrm_state","title":"/api/fail_safe/mrm_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#description","title":"Description","text":"

    Get the MRM state. For details, see the fail-safe.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#message","title":"Message","text":"Name Type Description state uint16 The state of MRM operation. behavior uint16 The currently selected behavior of MRM."},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/","title":"/api/interface/version","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#apiinterfaceversion","title":"/api/interface/version","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#description","title":"Description","text":"

    Get the interface version. The version follows Semantic Versioning.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#response","title":"Response","text":"Name Type Description major uint16 major version minor uint16 minor version patch uint16 patch version"},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/","title":"/api/localization/initialization_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#apilocalizationinitialization_state","title":"/api/localization/initialization_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#description","title":"Description","text":"

    Get the initialization state of localization. For details, see the localization.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#message","title":"Message","text":"Name Type Description state uint16 A value of the localization initialization state."},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/","title":"/api/localization/initialize","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#apilocalizationinitialize","title":"/api/localization/initialize","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#description","title":"Description","text":"

    Request to initialize localization. For details, see the localization.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#request","title":"Request","text":"Name Type Description pose geometry_msgs/msg/PoseWithCovarianceStamped[<=1] A global pose as the initial guess. If omitted, the GNSS pose will be used."},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/","title":"/api/motion/accept_start","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#apimotionaccept_start","title":"/api/motion/accept_start","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#description","title":"Description","text":"

    Accept the vehicle to start. This API can be used when the motion state is STARTING.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/","title":"/api/motion/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#apimotionstate","title":"/api/motion/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#description","title":"Description","text":"

    Get the motion state. For details, see the motion state.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#message","title":"Message","text":"Name Type Description state uint16 A value of the motion state."},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/","title":"/api/operation_mode/change_to_autonomous","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#apioperation_modechange_to_autonomous","title":"/api/operation_mode/change_to_autonomous","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#description","title":"Description","text":"

    Change the operation mode to autonomous. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/","title":"/api/operation_mode/change_to_local","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#apioperation_modechange_to_local","title":"/api/operation_mode/change_to_local","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#description","title":"Description","text":"

    Change the operation mode to local. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/","title":"/api/operation_mode/change_to_remote","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#apioperation_modechange_to_remote","title":"/api/operation_mode/change_to_remote","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#description","title":"Description","text":"

    Change the operation mode to remote. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/","title":"/api/operation_mode/change_to_stop","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#apioperation_modechange_to_stop","title":"/api/operation_mode/change_to_stop","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#description","title":"Description","text":"

    Change the operation mode to stop. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/","title":"/api/operation_mode/disable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#apioperation_modedisable_autoware_control","title":"/api/operation_mode/disable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#description","title":"Description","text":"

    Disable vehicle control by Autoware. For details, see the operation mode. This API fails if the vehicle does not support mode change by software.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/","title":"/api/operation_mode/enable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#apioperation_modeenable_autoware_control","title":"/api/operation_mode/enable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#description","title":"Description","text":"

    Enable vehicle control by Autoware. For details, see the operation mode. This API fails if the vehicle does not support mode change by software.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/","title":"/api/operation_mode/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#apioperation_modestate","title":"/api/operation_mode/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#description","title":"Description","text":"

    Get the operation mode state. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#message","title":"Message","text":"Name Type Description mode uint8 The selected command for Autoware control. is_autoware_control_enabled bool True if vehicle control by Autoware is enabled. is_in_transition bool True if the operation mode is in transition. is_stop_mode_available bool True if the operation mode can be changed to stop. is_autonomous_mode_available bool True if the operation mode can be changed to autonomous. is_local_mode_available bool True if the operation mode can be changed to local. is_remote_mode_available bool True if the operation mode can be changed to remote."},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/","title":"/api/perception/objects","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#apiperceptionobjects","title":"/api/perception/objects","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#description","title":"Description","text":"

    Get the recognized objects array with label, shape, current position and predicted path. For details, see the perception.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#message","title":"Message","text":"Name Type Description objects.id unique_identifier_msgs/msg/UUID The UUID of each object objects.existence_probability float64 The probability of the object exists objects.classification autoware_adapi_v1_msgs/msg/ObjectClassification[] The type of the object recognized and the confidence level objects.kinematics autoware_adapi_v1_msgs/msg/DynamicObjectKinematics Consist of the object pose, twist, acceleration and the predicted_paths objects.shape shape_msgs/msg/SolidPrimitive Describe the shape of the object with dimension, and polygon"},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/","title":"/api/planning/steering_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#apiplanningsteering_factors","title":"/api/planning/steering_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#description","title":"Description","text":"

    Get the steering factors, sorted in ascending order of distance. For details, see the planning factors.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#message","title":"Message","text":"Name Type Description factors.pose geometry_msgs/msg/Pose[2] The base link pose related to the steering factor. factors.distance float32[2] The distance from the base link to the above pose. factors.direction uint16 The direction of the steering factor. factors.status uint16 The status of the steering factor. factors.behavior string The behavior type of the steering factor. factors.sequence string The sequence type of the steering factor. factors.detail string The additional information of the steering factor. factors.cooperation autoware_adapi_v1_msgs/msg/CooperationStatus[<=1] The cooperation status if the module supports."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/","title":"/api/planning/velocity_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#apiplanningvelocity_factors","title":"/api/planning/velocity_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#description","title":"Description","text":"

    Get the velocity factors, sorted in ascending order of distance. For details, see the planning factors.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#message","title":"Message","text":"Name Type Description factors.pose geometry_msgs/msg/Pose The base link pose related to the velocity factor. factors.distance float32 The distance from the base link to the above pose. factors.status uint16 The status of the velocity factor. factors.behavior string The behavior type of the velocity factor. factors.sequence string The sequence type of the velocity factor. factors.detail string The additional information of the velocity factor. factors.cooperation autoware_adapi_v1_msgs/msg/CooperationStatus[<=1] The cooperation status if the module supports."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/","title":"/api/planning/cooperation/get_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#apiplanningcooperationget_policies","title":"/api/planning/cooperation/get_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#description","title":"Description","text":"

    Get the default decision that is used instead when the operator's decision is undecided. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status policies.behavior string The type of the target behavior. policies.sequence string The type of the target sequence. policies.policy uint8 The type of the cooperation policy."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/","title":"/api/planning/cooperation/set_commands","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#apiplanningcooperationset_commands","title":"/api/planning/cooperation/set_commands","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#description","title":"Description","text":"

    Set the operator's decision for cooperation. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#request","title":"Request","text":"Name Type Description commands.uuid unique_identifier_msgs/msg/UUID The ID in the cooperation status. commands.cooperator autoware_adapi_v1_msgs/msg/CooperationDecision The operator's decision."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/","title":"/api/planning/cooperation/set_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#apiplanningcooperationset_policies","title":"/api/planning/cooperation/set_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#description","title":"Description","text":"

    Set the default decision that is used instead when the operator's decision is undecided. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#request","title":"Request","text":"Name Type Description policies.behavior string The type of the target behavior. policies.sequence string The type of the target sequence. policies.policy uint8 The type of the cooperation policy."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/","title":"/api/routing/clear_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#apiroutingclear_route","title":"/api/routing/clear_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#description","title":"Description","text":"

    Clear the route.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/","title":"/api/routing/route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#apiroutingroute","title":"/api/routing/route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#description","title":"Description","text":"

    Get the route with the waypoint segments in lanelet format. It is empty if route is not set.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#message","title":"Message","text":"Name Type Description header std_msgs/msg/Header header for pose transformation data autoware_adapi_v1_msgs/msg/RouteData[<=1] The route in lanelet format"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/","title":"/api/routing/set_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#apiroutingset_route","title":"/api/routing/set_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#description","title":"Description","text":"

    Set the route with the waypoint segments in lanelet format. If start pose is not specified, the current pose will be used.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#request","title":"Request","text":"Name Type Description header std_msgs/msg/Header header for pose transformation goal geometry_msgs/msg/Pose goal pose segments autoware_adapi_v1_msgs/msg/RouteSegment[] waypoint segments in lanelet format"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/","title":"/api/routing/set_route_points","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#apiroutingset_route_points","title":"/api/routing/set_route_points","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#description","title":"Description","text":"

    Set the route with the waypoint poses. If start pose is not specified, the current pose will be used.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#request","title":"Request","text":"Name Type Description header std_msgs/msg/Header header for pose transformation goal geometry_msgs/msg/Pose goal pose waypoints geometry_msgs/msg/Pose[] waypoint poses"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/","title":"/api/routing/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#apiroutingstate","title":"/api/routing/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#description","title":"Description","text":"

    Get the route state. For details, see the routing.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#message","title":"Message","text":"Name Type Description state uint16 A value of the route state."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/","title":"/api/vehicle/dimensions","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#apivehicledimensions","title":"/api/vehicle/dimensions","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#description","title":"Description","text":"

    Get the vehicle dimensions. See here for the definition of each value.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status dimensions autoware_adapi_v1_msgs/msg/VehicleDimensions vehicle dimensions"},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/","title":"/api/vehicle/kinematics","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#apivehiclekinematics","title":"/api/vehicle/kinematics","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#description","title":"Description","text":"

    Publish vehicle kinematics.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#message","title":"Message","text":"Name Type Description geographic_pose geographic_msgs/msg/GeoPointStamped The longitude and latitude of the vehicle. If the map uses local coordinates, it will not be available. pose geometry_msgs/msg/PoseWithCovarianceStamped The pose with covariance from the base link. twist geometry_msgs/msg/TwistWithCovarianceStamped Vehicle current twist with covariance. accel geometry_msgs/msg/AccelWithCovarianceStamped Vehicle current acceleration with covariance."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/","title":"/api/vehicle/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#apivehiclestatus","title":"/api/vehicle/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#description","title":"Description","text":"

    Publish vehicle state information.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#message","title":"Message","text":"Name Type Description gear autoware_adapi_v1_msgs/msg/Gear Gear status. turn_indicators autoware_adapi_v1_msgs/msg/TurnIndicators Turn indicators status, only either left or right will be enabled. hazard_lights autoware_adapi_v1_msgs/msg/HazardLights Hazard lights status. steering_tire_angle float64 Vehicle current tire angle in radian. energy_percentage float32 Battery percentage or fuel percentage, it will depends on the vehicle."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/","title":"/api/vehicle/doors/command","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#apivehicledoorscommand","title":"/api/vehicle/doors/command","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#description","title":"Description","text":"

    Set the door command. This API is only available if the vehicle supports software door control.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#request","title":"Request","text":"Name Type Description doors.index uint32 The index of the target door. doors.command uint8 The command for the target door."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/","title":"/api/vehicle/doors/layout","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#apivehicledoorslayout","title":"/api/vehicle/doors/layout","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#description","title":"Description","text":"

    Get the door layout. It is an array of roles and descriptions for each door.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status doors.roles uint8[] The roles of the door in the service the vehicle provides. doors.description string The description of the door for display in the interface."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/","title":"/api/vehicle/doors/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#apivehicledoorsstatus","title":"/api/vehicle/doors/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#description","title":"Description","text":"

    The status of each door, such as opened or closed.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#message","title":"Message","text":"Name Type Description doors.status uint8 current door status"},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#user-story-of-bus-service","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#overview","title":"Overview","text":"

    This user story is a bus service that goes around the designated stops.

    "},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#scenario","title":"Scenario","text":"Step Operation Use Case 1 Startup the autonomous driving system. Launch and terminate 2 Drive the vehicle from the garage to the waiting position. Change the operation mode 3 Enable autonomous control. Change the operation mode 4 Drive the vehicle to the next bus stop. Drive to the designated position 5 Get on and off the vehicle. Get on and get off 6 Return to step 4 unless it's the last bus stop. 7 Drive the vehicle to the waiting position. Drive to the designated position 8 Drive the vehicle from the waiting position to the garage. Change the operation mode 9 Shutdown the autonomous driving system. Launch and terminate"},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#user-story-of-bus-service","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#overview","title":"Overview","text":"

    This user story is a taxi service that picks up passengers and drives them to their destination.

    "},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#scenario","title":"Scenario","text":"Step Operation Use Case 1 Startup the autonomous driving system. Launch and terminate 2 Drive the vehicle from the garage to the waiting position. Change the operation mode 3 Enable autonomous control. Change the operation mode 4 Drive the vehicle to the position to pick up. Drive to the designated position 5 Get on the vehicle. Get on and get off 6 Drive the vehicle to the destination. Drive to the designated position 7 Get off the vehicle. Get on and get off 8 Drive the vehicle to the waiting position. Drive to the designated position 9 Return to step 4 if there is another request. 10 Drive the vehicle from the waiting position to the garage. Change the operation mode 11 Shutdown the autonomous driving system. Launch and terminate"},{"location":"design/autoware-interfaces/ad-api/types/","title":"Types of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/types/#types-of-autoware-ad-api","title":"Types of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/","title":"autoware_adapi_v1_msgs/msg/CooperationCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#autoware_adapi_v1_msgsmsgcooperationcommand","title":"autoware_adapi_v1_msgs/msg/CooperationCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID uuid\nautoware_adapi_v1_msgs/CooperationDecision cooperator\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/","title":"autoware_adapi_v1_msgs/msg/CooperationDecision","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#autoware_adapi_v1_msgsmsgcooperationdecision","title":"autoware_adapi_v1_msgs/msg/CooperationDecision","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#definition","title":"Definition","text":"
    uint8 UNKNOWN = 0\nuint8 DEACTIVATE = 1\nuint8 ACTIVATE = 2\nuint8 AUTONOMOUS = 3\nuint8 UNDECIDED = 4\n\nuint8 decision\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/","title":"autoware_adapi_v1_msgs/msg/CooperationPolicy","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#autoware_adapi_v1_msgsmsgcooperationpolicy","title":"autoware_adapi_v1_msgs/msg/CooperationPolicy","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#definition","title":"Definition","text":"
    uint8 OPTIONAL = 1\nuint8 REQUIRED = 2\n\nstring behavior\nstring sequence\nuint8 policy\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/","title":"autoware_adapi_v1_msgs/msg/CooperationStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#autoware_adapi_v1_msgsmsgcooperationstatus","title":"autoware_adapi_v1_msgs/msg/CooperationStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID uuid\nautoware_adapi_v1_msgs/CooperationDecision autonomous\nautoware_adapi_v1_msgs/CooperationDecision cooperator\nbool cancellable\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/","title":"autoware_adapi_v1_msgs/msg/DoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#autoware_adapi_v1_msgsmsgdoorcommand","title":"autoware_adapi_v1_msgs/msg/DoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#definition","title":"Definition","text":"
    uint8 OPEN = 1\nuint8 CLOSE = 2\n\nuint32 index\nuint8 command\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/","title":"autoware_adapi_v1_msgs/msg/DoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#autoware_adapi_v1_msgsmsgdoorlayout","title":"autoware_adapi_v1_msgs/msg/DoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#definition","title":"Definition","text":"
    uint8 GET_ON = 1\nuint8 GET_OFF = 2\n\nuint8[] roles\nstring description\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/","title":"autoware_adapi_v1_msgs/msg/DoorStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#autoware_adapi_v1_msgsmsgdoorstatus","title":"autoware_adapi_v1_msgs/msg/DoorStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#definition","title":"Definition","text":"
    uint8 UNKNOWN = 0\nuint8 NOT_AVAILABLE = 1\nuint8 OPENED = 2\nuint8 CLOSED = 3\nuint8 OPENING = 4\nuint8 CLOSING = 5\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/","title":"autoware_adapi_v1_msgs/msg/DoorStatusArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#autoware_adapi_v1_msgsmsgdoorstatusarray","title":"autoware_adapi_v1_msgs/msg/DoorStatusArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\nautoware_adapi_v1_msgs/DoorStatus[] doors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/","title":"autoware_adapi_v1_msgs/msg/DynamicObject","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#autoware_adapi_v1_msgsmsgdynamicobject","title":"autoware_adapi_v1_msgs/msg/DynamicObject","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID id\nfloat64 existence_probability\nautoware_adapi_v1_msgs/ObjectClassification[] classification\nautoware_adapi_v1_msgs/DynamicObjectKinematics kinematics\nshape_msgs/SolidPrimitive shape\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#autoware_adapi_v1_msgsmsgdynamicobjectarray","title":"autoware_adapi_v1_msgs/msg/DynamicObjectArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/DynamicObject[] objects\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#autoware_adapi_v1_msgsmsgdynamicobjectkinematics","title":"autoware_adapi_v1_msgs/msg/DynamicObjectKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#definition","title":"Definition","text":"
    geometry_msgs/Pose pose\ngeometry_msgs/Twist twist\ngeometry_msgs/Accel accel\n\nautoware_adapi_v1_msgs/DynamicObjectPath[] predicted_paths\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectPath","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#autoware_adapi_v1_msgsmsgdynamicobjectpath","title":"autoware_adapi_v1_msgs/msg/DynamicObjectPath","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#definition","title":"Definition","text":"
    geometry_msgs/Pose[] path\nbuiltin_interfaces/Duration time_step\nfloat64 confidence\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/","title":"autoware_adapi_v1_msgs/msg/Gear","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#autoware_adapi_v1_msgsmsggear","title":"autoware_adapi_v1_msgs/msg/Gear","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 NEUTRAL = 1\nuint8 DRIVE = 2\nuint8 REVERSE = 3\nuint8 PARK = 4\nuint8 LOW = 5\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/","title":"autoware_adapi_v1_msgs/msg/HazardLights","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#autoware_adapi_v1_msgsmsghazardlights","title":"autoware_adapi_v1_msgs/msg/HazardLights","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 DISABLE = 1\nuint8 ENABLE = 2\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/","title":"autoware_adapi_v1_msgs/msg/LocalizationInitializationState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#autoware_adapi_v1_msgsmsglocalizationinitializationstate","title":"autoware_adapi_v1_msgs/msg/LocalizationInitializationState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 UNINITIALIZED = 1\nuint16 INITIALIZING = 2\nuint16 INITIALIZED = 3\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/","title":"autoware_adapi_v1_msgs/msg/MotionState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#autoware_adapi_v1_msgsmsgmotionstate","title":"autoware_adapi_v1_msgs/msg/MotionState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 STOPPED = 1\nuint16 STARTING = 2\nuint16 MOVING = 3\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/","title":"autoware_adapi_v1_msgs/msg/MrmState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#autoware_adapi_v1_msgsmsgmrmstate","title":"autoware_adapi_v1_msgs/msg/MrmState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\n\n# For common use\nuint16 UNKNOWN = 0\n\n# For state\nuint16 NORMAL = 1\nuint16 MRM_OPERATING = 2\nuint16 MRM_SUCCEEDED = 3\nuint16 MRM_FAILED = 4\n\n# For behavior\nuint16 NONE = 1\nuint16 EMERGENCY_STOP = 2\nuint16 COMFORTABLE_STOP = 3\n\nuint16 state\nuint16 behavior\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/","title":"autoware_adapi_v1_msgs/msg/ObjectClassification","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#autoware_adapi_v1_msgsmsgobjectclassification","title":"autoware_adapi_v1_msgs/msg/ObjectClassification","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#definition","title":"Definition","text":"
    uint8 UNKNOWN=0\nuint8 CAR=1\nuint8 TRUCK=2\nuint8 BUS=3\nuint8 TRAILER = 4\nuint8 MOTORCYCLE = 5\nuint8 BICYCLE = 6\nuint8 PEDESTRIAN = 7\n\nuint8 label\nfloat64 probability\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/","title":"autoware_adapi_v1_msgs/msg/OperationModeState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#autoware_adapi_v1_msgsmsgoperationmodestate","title":"autoware_adapi_v1_msgs/msg/OperationModeState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#definition","title":"Definition","text":"
    # constants for mode\nuint8 UNKNOWN = 0\nuint8 STOP = 1\nuint8 AUTONOMOUS = 2\nuint8 LOCAL = 3\nuint8 REMOTE = 4\n\n# variables\nbuiltin_interfaces/Time stamp\nuint8 mode\nbool is_autoware_control_enabled\nbool is_in_transition\nbool is_stop_mode_available\nbool is_autonomous_mode_available\nbool is_local_mode_available\nbool is_remote_mode_available\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/","title":"autoware_adapi_v1_msgs/msg/ResponseStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#autoware_adapi_v1_msgsmsgresponsestatus","title":"autoware_adapi_v1_msgs/msg/ResponseStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#definition","title":"Definition","text":"
    # error code\nuint16 UNKNOWN = 50000\nuint16 SERVICE_UNREADY = 50001\nuint16 SERVICE_TIMEOUT = 50002\nuint16 TRANSFORM_ERROR = 50003\nuint16 PARAMETER_ERROR = 50004\n\n# warning code\nuint16 DEPRECATED = 60000\nuint16 NO_EFFECT = 60001\n\n# variables\nbool   success\nuint16 code\nstring message\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/","title":"autoware_adapi_v1_msgs/msg/Route","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#autoware_adapi_v1_msgsmsgroute","title":"autoware_adapi_v1_msgs/msg/Route","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteData[<=1] data\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/","title":"autoware_adapi_v1_msgs/msg/RouteData","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#autoware_adapi_v1_msgsmsgroutedata","title":"autoware_adapi_v1_msgs/msg/RouteData","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#definition","title":"Definition","text":"
    geometry_msgs/Pose start\ngeometry_msgs/Pose goal\nautoware_adapi_v1_msgs/RouteSegment[] segments\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/","title":"autoware_adapi_v1_msgs/msg/RouteOption","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#autoware_adapi_v1_msgsmsgrouteoption","title":"autoware_adapi_v1_msgs/msg/RouteOption","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#definition","title":"Definition","text":"
    bool allow_goal_modification\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/","title":"autoware_adapi_v1_msgs/msg/RoutePrimitive","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#autoware_adapi_v1_msgsmsgrouteprimitive","title":"autoware_adapi_v1_msgs/msg/RoutePrimitive","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#definition","title":"Definition","text":"
    int64 id\nstring type  # The same id may be used for each type.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/","title":"autoware_adapi_v1_msgs/msg/RouteSegment","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#autoware_adapi_v1_msgsmsgroutesegment","title":"autoware_adapi_v1_msgs/msg/RouteSegment","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/RoutePrimitive   preferred\nautoware_adapi_v1_msgs/RoutePrimitive[] alternatives  # Does not include the preferred primitive.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/","title":"autoware_adapi_v1_msgs/msg/RouteState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#autoware_adapi_v1_msgsmsgroutestate","title":"autoware_adapi_v1_msgs/msg/RouteState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 UNSET = 1\nuint16 SET = 2\nuint16 ARRIVED = 3\nuint16 CHANGING = 4\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/","title":"autoware_adapi_v1_msgs/msg/SteeringFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#autoware_adapi_v1_msgsmsgsteeringfactor","title":"autoware_adapi_v1_msgs/msg/SteeringFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#definition","title":"Definition","text":"
    # constants for common use\nuint16 UNKNOWN = 0\n\n# constants for direction\nuint16 LEFT = 1\nuint16 RIGHT = 2\nuint16 STRAIGHT = 3\n\n# constants for status\nuint16 APPROACHING = 1\nuint16 TURNING = 3\n\n# variables\ngeometry_msgs/Pose[2] pose\nfloat32[2] distance\nuint16 direction\nuint16 status\nstring behavior\nstring sequence\nstring detail\nautoware_adapi_v1_msgs/CooperationStatus[<=1] cooperation\n\n\n\n# deprecated constants for type\nuint16 INTERSECTION = 1\nuint16 LANE_CHANGE = 2\nuint16 AVOIDANCE_PATH_CHANGE = 3\nuint16 AVOIDANCE_PATH_RETURN = 4\nuint16 STATION = 5\nuint16 PULL_OUT = 6 # Deprecated. Use START_PLANNER.\nuint16 START_PLANNER = 6\nuint16 PULL_OVER = 7  # Deprecated. Use GOAL_PLANNER.\nuint16 GOAL_PLANNER = 7\nuint16 EMERGENCY_OPERATION = 8\n\n# deprecated constants for status\nuint16 TRYING = 2\n\n# deprecated variables\nuint16 type\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/","title":"autoware_adapi_v1_msgs/msg/SteeringFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#autoware_adapi_v1_msgsmsgsteeringfactorarray","title":"autoware_adapi_v1_msgs/msg/SteeringFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/SteeringFactor[] factors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/","title":"autoware_adapi_v1_msgs/msg/TurnIndicators","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#autoware_adapi_v1_msgsmsgturnindicators","title":"autoware_adapi_v1_msgs/msg/TurnIndicators","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 DISABLE = 1\nuint8 LEFT = 2\nuint8 RIGHT = 3\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/","title":"autoware_adapi_v1_msgs/msg/VehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#autoware_adapi_v1_msgsmsgvehicledimensions","title":"autoware_adapi_v1_msgs/msg/VehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#definition","title":"Definition","text":"
    float32 wheel_radius\nfloat32 wheel_width\nfloat32 wheel_base\nfloat32 wheel_tread\nfloat32 front_overhang\nfloat32 rear_overhang\nfloat32 left_overhang\nfloat32 right_overhang\nfloat32 height\ngeometry_msgs/Polygon footprint\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/","title":"autoware_adapi_v1_msgs/msg/VehicleKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#autoware_adapi_v1_msgsmsgvehiclekinematics","title":"autoware_adapi_v1_msgs/msg/VehicleKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#definition","title":"Definition","text":"
    # Geographic point, using the WGS 84 reference ellipsoid.\n# This data will be invalid if Autoware does not provide projection information between geographic coordinates and local coordinates.\ngeographic_msgs/GeoPointStamped geographic_pose\n\n# Local coordinate from the autoware\ngeometry_msgs/PoseWithCovarianceStamped pose\ngeometry_msgs/TwistWithCovarianceStamped twist\ngeometry_msgs/AccelWithCovarianceStamped accel\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/","title":"autoware_adapi_v1_msgs/msg/VehicleStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#autoware_adapi_v1_msgsmsgvehiclestatus","title":"autoware_adapi_v1_msgs/msg/VehicleStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\nautoware_adapi_v1_msgs/Gear gear\nautoware_adapi_v1_msgs/TurnIndicators turn_indicators\nautoware_adapi_v1_msgs/HazardLights hazard_lights\nfloat64 steering_tire_angle\nfloat32 energy_percentage  # Battery percentage or fuel percentage, it will depend on the vehicle.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/","title":"autoware_adapi_v1_msgs/msg/VelocityFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#autoware_adapi_v1_msgsmsgvelocityfactor","title":"autoware_adapi_v1_msgs/msg/VelocityFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#definition","title":"Definition","text":"
    # constants for common use\nuint16 UNKNOWN = 0\n\n# constants for status\nuint16 APPROACHING = 1\nuint16 STOPPED = 2\n\n# variables\ngeometry_msgs/Pose pose\nfloat32 distance\nuint16 status\nstring behavior\nstring sequence\nstring detail\nautoware_adapi_v1_msgs/CooperationStatus[<=1] cooperation\n\n\n\n# deprecated constants for type\nuint16 SURROUNDING_OBSTACLE = 1\nuint16 ROUTE_OBSTACLE = 2\nuint16 INTERSECTION = 3\nuint16 CROSSWALK = 4\nuint16 REAR_CHECK = 5\nuint16 USER_DEFINED_DETECTION_AREA = 6\nuint16 NO_STOPPING_AREA = 7\nuint16 STOP_SIGN = 8\nuint16 TRAFFIC_SIGNAL = 9\nuint16 V2I_GATE_CONTROL_ENTER = 10\nuint16 V2I_GATE_CONTROL_LEAVE = 11\nuint16 MERGE = 12\nuint16 SIDEWALK = 13\nuint16 LANE_CHANGE = 14\nuint16 AVOIDANCE = 15\nuint16 EMERGENCY_STOP_OPERATION = 16\nuint16 NO_DRIVABLE_LANE = 17\n\n# deprecated variables\nuint16 type\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/","title":"autoware_adapi_v1_msgs/msg/VelocityFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#autoware_adapi_v1_msgsmsgvelocityfactorarray","title":"autoware_adapi_v1_msgs/msg/VelocityFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/VelocityFactor[] factors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/","title":"autoware_adapi_v1_msgs/srv/AcceptStart","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#autoware_adapi_v1_msgssrvacceptstart","title":"autoware_adapi_v1_msgs/srv/AcceptStart","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#definition","title":"Definition","text":"
    ---\nuint16 ERROR_NOT_STARTING = 1\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/","title":"autoware_adapi_v1_msgs/srv/ChangeOperationMode","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#autoware_adapi_v1_msgssrvchangeoperationmode","title":"autoware_adapi_v1_msgs/srv/ChangeOperationMode","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#definition","title":"Definition","text":"
    ---\nuint16 ERROR_NOT_AVAILABLE = 1\nuint16 ERROR_IN_TRANSITION = 2\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/","title":"autoware_adapi_v1_msgs/srv/ClearRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#autoware_adapi_v1_msgssrvclearroute","title":"autoware_adapi_v1_msgs/srv/ClearRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/","title":"autoware_adapi_v1_msgs/srv/GetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#autoware_adapi_v1_msgssrvgetcooperationpolicies","title":"autoware_adapi_v1_msgs/srv/GetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/CooperationPolicy[] policies\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/","title":"autoware_adapi_v1_msgs/srv/GetDoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#autoware_adapi_v1_msgssrvgetdoorlayout","title":"autoware_adapi_v1_msgs/srv/GetDoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/DoorLayout[] doors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/","title":"autoware_adapi_v1_msgs/srv/GetVehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#autoware_adapi_v1_msgssrvgetvehicledimensions","title":"autoware_adapi_v1_msgs/srv/GetVehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/VehicleDimensions dimensions\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/","title":"autoware_adapi_v1_msgs/srv/InitializeLocalization","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#autoware_adapi_v1_msgssrvinitializelocalization","title":"autoware_adapi_v1_msgs/srv/InitializeLocalization","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#definition","title":"Definition","text":"
    geometry_msgs/PoseWithCovarianceStamped[<=1] pose\n---\nuint16 ERROR_UNSAFE = 1\nuint16 ERROR_GNSS_SUPPORT = 2\nuint16 ERROR_GNSS = 3\nuint16 ERROR_ESTIMATION = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/","title":"autoware_adapi_v1_msgs/srv/SetCooperationCommands","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#autoware_adapi_v1_msgssrvsetcooperationcommands","title":"autoware_adapi_v1_msgs/srv/SetCooperationCommands","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/CooperationCommand[] commands\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/","title":"autoware_adapi_v1_msgs/srv/SetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#autoware_adapi_v1_msgssrvsetcooperationpolicies","title":"autoware_adapi_v1_msgs/srv/SetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/CooperationPolicy[] policies\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/","title":"autoware_adapi_v1_msgs/srv/SetDoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#autoware_adapi_v1_msgssrvsetdoorcommand","title":"autoware_adapi_v1_msgs/srv/SetDoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/DoorCommand[] doors\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/","title":"autoware_adapi_v1_msgs/srv/SetRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#autoware_adapi_v1_msgssrvsetroute","title":"autoware_adapi_v1_msgs/srv/SetRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteOption option\ngeometry_msgs/Pose goal\nautoware_adapi_v1_msgs/RouteSegment[] segments\n---\nuint16 ERROR_ROUTE_EXISTS = 1 # Deprecated. Use ERROR_INVALID_STATE.\nuint16 ERROR_INVALID_STATE = 1\nuint16 ERROR_PLANNER_UNREADY = 2\nuint16 ERROR_PLANNER_FAILED = 3\nuint16 ERROR_REROUTE_FAILED = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/","title":"autoware_adapi_v1_msgs/srv/SetRoutePoints","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#autoware_adapi_v1_msgssrvsetroutepoints","title":"autoware_adapi_v1_msgs/srv/SetRoutePoints","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteOption option\ngeometry_msgs/Pose goal\ngeometry_msgs/Pose[] waypoints\n---\nuint16 ERROR_ROUTE_EXISTS = 1 # Deprecated. Use ERROR_INVALID_STATE.\nuint16 ERROR_INVALID_STATE = 1\nuint16 ERROR_PLANNER_UNREADY = 2\nuint16 ERROR_PLANNER_FAILED = 3\nuint16 ERROR_REROUTE_FAILED = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/","title":"autoware_adapi_version_msgs/srv/InterfaceVersion","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#autoware_adapi_version_msgssrvinterfaceversion","title":"autoware_adapi_version_msgs/srv/InterfaceVersion","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#definition","title":"Definition","text":"
    ---\nuint16 major\nuint16 minor\nuint16 patch\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/","title":"Change the operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#change-the-operation-mode","title":"Change the operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#sequence","title":"Sequence","text":" "},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/","title":"Drive to the designated position","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#drive-to-the-designated-position","title":"Drive to the designated position","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/","title":"Get on and get off","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#get-on-and-get-off","title":"Get on and get off","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/","title":"Initialize the 
pose","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#initialize-the-pose","title":"Initialize the pose","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#sequence","title":"Sequence","text":" "},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/","title":"Launch and terminate","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#launch-and-terminate","title":"Launch and terminate","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/","title":"Vehicle monitoring","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#vehicle-monitoring","title":"Vehicle monitoring","text":"

    AD API provides current vehicle status for remote monitoring, visualization for passengers, etc. Use the API below depending on the data you want to monitor.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#vehicle-status","title":"Vehicle status","text":"

    The vehicle status provides basic information such as kinematics, indicators, and dimensions. This allows a remote operator to know the position and velocity of the vehicle. For applications such as FMS, it can help find vehicles that need assistance, such as vehicles that are stuck or brake suddenly. It is also possible to determine the actual distance to an object from the vehicle dimensions.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#planning-factors","title":"Planning factors","text":"

    The planning factors provide the planning status of the vehicle. HMI can use this to warn of sudden movements of the vehicle, and to share the stop reason with passengers for comfortable driving.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#detected-objects","title":"Detected objects","text":"

    The perception component provides the objects detected by Autoware. HMI can use this to visualize objects around the vehicle.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/","title":"Vehicle operation","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#vehicle-operation","title":"Vehicle operation","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#request-to-intervene","title":"Request to intervene","text":"

    Request to intervene (RTI) is a feature that requires the operator to switch to manual driving mode. It is also called Take Over Request (TOR). Interfaces for RTI are currently being discussed. For now assume that manual driving is requested if the MRM state is not NORMAL. See fail-safe for details.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#request-to-cooperate","title":"Request to cooperate","text":"

    Request to cooperate (RTC) is a feature that lets the operator support decision-making in autonomous driving mode. Autoware usually drives the vehicle using its own decisions, but the operator may prefer to make their own decisions in complex situations. Since RTC only overrides the decision and does not require a change of operation mode, the vehicle can continue autonomous driving, unlike RTI. See cooperation for details.

    "},{"location":"design/autoware-interfaces/components/","title":"Component interfaces","text":""},{"location":"design/autoware-interfaces/components/#component-interfaces","title":"Component interfaces","text":"

    Warning

    Under Construction

    See here for an overview.

    "},{"location":"design/autoware-interfaces/components/control/","title":"Control","text":""},{"location":"design/autoware-interfaces/components/control/#control","title":"Control","text":""},{"location":"design/autoware-interfaces/components/control/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/control/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current position and orientation of ego. Published by the Localization module.

    "},{"location":"design/autoware-interfaces/components/control/#trajectory","title":"Trajectory","text":"

    Trajectory to be followed by the controller. See Outputs of Planning.

    "},{"location":"design/autoware-interfaces/components/control/#steering-status","title":"Steering Status","text":"

    Current steering of the ego vehicle. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/control/#actuation-status","title":"Actuation Status","text":"

    Actuation status of the ego vehicle for acceleration, steering, and brake.

    TODO This represents the reported physical efforts exerted by the vehicle actuators. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/control/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/control/#vehicle-control-command","title":"Vehicle Control Command","text":"

    A motion signal to drive the vehicle, achieved by the low-level controller in the vehicle layer. Used by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/localization/","title":"Localization","text":""},{"location":"design/autoware-interfaces/components/localization/#localization","title":"Localization","text":""},{"location":"design/autoware-interfaces/components/localization/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/localization/#pointcloud-map","title":"Pointcloud Map","text":"

    Environment map created with point cloud, published by the map server.

    A 3d point cloud map is used for LiDAR-based localization in Autoware.

    "},{"location":"design/autoware-interfaces/components/localization/#manual-initial-pose","title":"Manual Initial Pose","text":"

    Start pose of ego, published by the user interface.

    "},{"location":"design/autoware-interfaces/components/localization/#3d-lidar-scanning","title":"3D-LiDAR Scanning","text":"

    LiDAR scanning for NDT matching, published by the LiDAR sensor.

    The raw 3D-LiDAR data needs to be processed by the point cloud pre-processing modules before being used for localization.

    "},{"location":"design/autoware-interfaces/components/localization/#automatic-initial-pose","title":"Automatic Initial pose","text":"

    Start pose of ego, calculated from INS (Inertial Navigation System) sensing data.

    When the initial pose is not set manually, the message can be used for automatic pose initialization.

    Current Geographic coordinate of the ego, published by the GNSS sensor.

    Current orientation of the ego, published by the GNSS-INS.

    "},{"location":"design/autoware-interfaces/components/localization/#imu-data","title":"IMU Data","text":"

    Current orientation, angular velocity and linear acceleration of ego, calculated from IMU sensing data.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-velocity-status","title":"Vehicle Velocity Status","text":"

    Current velocity of the ego vehicle, published by the vehicle interface.

    Before the velocity input reaches the localization interface, the vehicle_velocity_converter module converts the message type from autoware_auto_vehicle_msgs/msg/VelocityReport to geometry_msgs/msg/TwistWithCovarianceStamped.

    "},{"location":"design/autoware-interfaces/components/localization/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/localization/#vehicle-pose","title":"Vehicle pose","text":"

    Current pose of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-velocity","title":"Vehicle velocity","text":"

    Current velocity of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-acceleration","title":"Vehicle acceleration","text":"

    Current acceleration of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current pose, velocity and acceleration of ego, calculated from localization interface.

    Note: Kinematic state contains pose, velocity and acceleration. In the future, pose, velocity and acceleration will not be used as output for localization.

    The message will be subscribed by the planning and control module.

    "},{"location":"design/autoware-interfaces/components/localization/#localization-accuracy","title":"Localization Accuracy","text":"

    Diagnostics information that indicates if the localization module works properly.

    TBD.

    "},{"location":"design/autoware-interfaces/components/map/","title":"Map","text":""},{"location":"design/autoware-interfaces/components/map/#map","title":"Map","text":""},{"location":"design/autoware-interfaces/components/map/#overview","title":"Overview","text":"

    Autoware relies on high-definition point cloud maps and vector maps of the driving environment to perform various tasks. Before launching Autoware, you need to load the pre-created map files.

    "},{"location":"design/autoware-interfaces/components/map/#inputs","title":"Inputs","text":"

    Refer to Creating maps on how to create maps.

    "},{"location":"design/autoware-interfaces/components/map/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/map/#point-cloud-map","title":"Point cloud map","text":"

    It loads point cloud files and publishes the maps to the other Autoware nodes in various configurations. Currently, it supports the following types:

    "},{"location":"design/autoware-interfaces/components/map/#lanelet2-map","title":"Lanelet2 map","text":"

    It loads a Lanelet2 file and publishes the map data as autoware_auto_mapping_msgs/msg/HADMapBin message. The lat/lon coordinates are projected onto the MGRS coordinates.

    "},{"location":"design/autoware-interfaces/components/map/#lanelet2-map-visualization","title":"Lanelet2 map visualization","text":"

    Visualize autoware_auto_mapping_msgs/HADMapBin messages in Rviz.

    "},{"location":"design/autoware-interfaces/components/perception-interface/","title":"Perception","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#perception","title":"Perception","text":"
    graph TD\n    cmp_sen(\"Sensing\"):::cls_sen\n    cmp_loc(\"Localization\"):::cls_loc\n    cmp_per(\"Perception\"):::cls_per\n    cmp_plan(\"Planning\"):::cls_plan\n\n    msg_img(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_sen\n\n    msg_ldr(\"<font size=2><b>Lidar Point Cloud</b></font size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_sen\n\n    msg_lanenet(\"<font size=2><b>Lanelet2 Map</b></font size>\n    <font size=1>autoware_auto_mapping_msgs/HADMapBin</font size>\"):::cls_loc\n\n    msg_vks(\"<font size=2><b>Vehicle Kinematic State</b></font size>\n    <font size=1>nav_msgs/Odometry</font size>\"):::cls_loc\n\n    msg_obj(\"<font size=2><b>3D Object Predictions </b></font size>\n    <font size=1>autoware_auto_perception_msgs/PredictedObjects</font size>\"):::cls_per\n\n    msg_tl(\"<font size=2><b>Traffic Light Response </b></font size>\n    <font size=1>autoware_perception_msgs/TrafficSignalArray</font size>\"):::cls_per\n\n    msg_tq(\"<font size=2><b>Traffic Light Query </b></font size>\n    <font size=1>TBD</font size>\"):::cls_plan\n\n\n    cmp_sen --> msg_img --> cmp_per\n    cmp_sen --> msg_ldr --> cmp_per\n    cmp_per --> msg_obj --> cmp_plan\n    cmp_per --> msg_tl --> cmp_plan\n    cmp_plan --> msg_tq -->cmp_per\n\n    cmp_loc --> msg_vks --> cmp_per\n    cmp_loc --> msg_lanenet --> cmp_per\n\nclassDef cmp_sen fill:#F8CECC,stroke:#999,stroke-width:1px;\nclassDef cls_loc fill:#D5E8D4,stroke:#999,stroke-width:1px;\nclassDef cls_per fill:#FFF2CC,stroke:#999,stroke-width:1px;\nclassDef cls_plan fill:#5AB8FF,stroke:#999,stroke-width:1px;
    "},{"location":"design/autoware-interfaces/components/perception-interface/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#pointcloud","title":"PointCloud","text":"

    PointCloud data published by Lidar.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#image","title":"Image","text":"

    Image frame captured by camera.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current position of ego, used in traffic signal recognition. See output of Localization.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#lanelet2-map","title":"Lanelet2 Map","text":"

    Map of the environment. See outputs of Map.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#3d-object-predictions","title":"3D Object Predictions","text":"

    3D Objects detected, tracked and predicted by sensor fusing.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#traffic-signals","title":"Traffic Signals","text":"

    Traffic signals recognized by the object detection model.

    "},{"location":"design/autoware-interfaces/components/planning/","title":"Planning","text":""},{"location":"design/autoware-interfaces/components/planning/#planning","title":"Planning","text":"

    This page provides specific specifications about the Interface of the Planning Component. Please refer to the planning architecture design document for high-level concepts and data flow.

    TODO: The detailed definitions (meanings of elements included in each topic) are not described yet, need to be updated.

    "},{"location":"design/autoware-interfaces/components/planning/#input","title":"Input","text":""},{"location":"design/autoware-interfaces/components/planning/#from-map-component","title":"From Map Component","text":"Name Topic Type Description Vector Map /map/vector_map autoware_auto_mapping_msgs/msg/HADMapBin Map of the environment where the planning takes place."},{"location":"design/autoware-interfaces/components/planning/#from-localization-component","title":"From Localization Component","text":"Name Topic Type Description Vehicle Kinematic State /localization/kinematic_state nav_msgs/msg/Odometry Current position, orientation and velocity of ego. Vehicle Acceleration /localization/acceleration geometry_msgs/msg/AccelWithCovarianceStamped Current acceleration of ego.

    TODO: acceleration information should be merged into the kinematic state.

    "},{"location":"design/autoware-interfaces/components/planning/#from-perception-component","title":"From Perception Component","text":"Name Topic Type Description Objects /perception/object_recognition/objects autoware_auto_perception_msgs/msg/PredictedObjects Set of perceived objects around ego that need to be avoided or followed when planning a trajectory. This contains semantics information such as an object class (e.g. vehicle, pedestrian, etc) or a shape of the objects. Obstacles /perception/obstacle_segmentation/pointcloud sensor_msgs/msg/PointCloud2 Set of perceived obstacles around ego that need to be avoided or followed when planning a trajectory. This only contains primitive information about the obstacle. No shape nor velocity information. Occupancy Grid Map /perception/occupancy_grid_map/map nav_msgs/msg/OccupancyGrid Contains the presence of obstacles and blind spot information (represented as UNKNOWN). Traffic Signal /perception/traffic_light_recognition/traffic_signals autoware_auto_perception_msgs/msg/TrafficSignalArray Contains the traffic signal information such as a color (green, yellow, red) and an arrow (right, left, straight).

    TODO: The type of the Obstacles information should not depend on the specific sensor message type (now PointCloud). It needs to be fixed.

    "},{"location":"design/autoware-interfaces/components/planning/#from-api","title":"From API","text":"Name Topic Type Description Max Velocity /planning/scenario_planning/max_velocity_default autoware_adapi_v1_msgs/srv/SetRoutePoints Indicate the maximum value of the vehicle speed plan Operation Mode /system/operation_mode/state autoware_adapi_v1_msgs/msg/OperationModeState Indicates the current operation mode (automatic/manual, etc.). Route Set /planning/mission_planning/set_route autoware_adapi_v1_msgs/srv/SetRoute Indicates to set the route when the vehicle is stopped. Route Points Set /planning/mission_planning/set_route_points autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to set the route with points when the vehicle is stopped. Route Change /planning/mission_planning/change_route autoware_adapi_v1_msgs/srv/SetRoute Indicates to change the route when the vehicle is moving. Route Points Change /planning/mission_planning/change_route_points autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to change the route with points when the vehicle is moving. Route Clear /planning/mission_planning/clear_route autoware_adapi_v1_msgs/srv/ClearRoute Indicates to clear the route information. MRM Route Set Points /planning/mission_planning/mission_planner/srv/set_mrm_route autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to set the emergency route. MRM Route Clear /planning/mission_planning/mission_planner/srv/clear_mrm_route autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to clear the emergency route."},{"location":"design/autoware-interfaces/components/planning/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/planning/#to-control","title":"To Control","text":"Name Topic Type Description Trajectory /planning/trajectory autoware_auto_planning_msgs/msg/Trajectory A sequence of space and velocity and acceleration points to be followed by the controller. 
Turn Indicator /planning/turn_indicators_cmd autoware_auto_vehicle_msgs/msg/TurnIndicatorsCommand Turn indicator signal to be followed by the vehicle. Hazard Light /planning/hazard_lights_cmd autoware_auto_vehicle_msgs/msg/HazardLightsCommand Hazard light signal to be followed by the vehicle."},{"location":"design/autoware-interfaces/components/planning/#to-system","title":"To System","text":"Name Topic Type Description Diagnostics /planning/hazard_lights_cmd diagnostic_msgs/msg/DiagnosticArray Diagnostic status of the Planning component reported to the System component."},{"location":"design/autoware-interfaces/components/planning/#to-api","title":"To API","text":"Name Topic Type Description Path Candidate /planning/path_candidate/* autoware_auto_planning_msgs/msg/Path The path Autoware is about to take. Users can interrupt the operation based on the path candidate information. Steering Factor /planning/steering_factor/* autoware_adapi_v1_msgs/msg/SteeringFactorArray Information about the steering maneuvers performed by Autoware (e.g., steering to the right for a right turn, etc.) Velocity Factor /planning/velocity_factors/* autoware_adapi_v1_msgs/msg/VelocityFactorArray Information about the velocity maneuvers performed by Autoware (e.g., stop for an obstacle, etc.)"},{"location":"design/autoware-interfaces/components/planning/#planning-internal-interface","title":"Planning internal interface","text":"

    This section explains the communication between the different planning modules shown in the Planning Architecture Design.

    "},{"location":"design/autoware-interfaces/components/planning/#from-mission-planning-to-scenario-planning","title":"From Mission Planning to Scenario Planning","text":"Name Topic Type Description Route /planning/mission_planning/route autoware_planning_msgs/msg/LaneletRoute A sequence of lane IDs on a Lanelet map, from the starting point to the destination."},{"location":"design/autoware-interfaces/components/planning/#from-behavior-planning-to-motion-planning","title":"From Behavior Planning to Motion Planning","text":"Name Topic Type Description Path /planning/scenario_planning/lane_driving/behavior_planning/path autoware_auto_planning_msgs/msg/Path A sequence of approximate vehicle positions for driving, along with information on the maximum speed and the drivable areas. Modules receiving this message are expected to make changes to the path within the constraints of the drivable areas and the maximum speed, generating the desired final trajectory."},{"location":"design/autoware-interfaces/components/planning/#from-scenario-planning-to-validation","title":"From Scenario Planning to Validation","text":"Name Topic Type Description Trajectory /planning/scenario_planning/trajectory autoware_auto_planning_msgs/msg/Trajectory A sequence of precise vehicle positions, speeds, and accelerations required for driving. It is expected that the vehicle will follow this trajectory."},{"location":"design/autoware-interfaces/components/sensing/","title":"Sensing","text":""},{"location":"design/autoware-interfaces/components/sensing/#sensing","title":"Sensing","text":"
    graph TD\n    cmp_drv(\"Drivers\"):::cls_drv\n    cmp_loc(\"Localization\"):::cls_loc\n    cmp_per(\"Perception\"):::cls_per\n    cmp_sen(\"Preprocessors\"):::cls_sen\n    msg_ult(\"<font size=2><b>Ultrasonics</b></font size>\n    <font size=1>sensor_msgs/Range</font size>\"):::cls_drv\n    msg_img(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_drv\n    msg_ldr(\"<font size=2><b>Lidar Point Cloud</b></font size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_drv\n    msg_rdr_t(\"<font size=2><b>Radar Tracks</b></font size>\n    <font size=1>radar_msgs/RadarTracks</font size>\"):::cls_drv\n    msg_rdr_s(\"<font size=2><b>Radar Scan</b></font size>\n    <font size=1>radar_msgs/RadarScan</font size>\"):::cls_drv\n    msg_gnss(\"<font size=2><b>GNSS-INS Position</b></font size>\n    <font size=1>sensor_msgs/NavSatFix</font size>\"):::cls_drv\n    msg_gnssori(\"<font size=2><b>GNSS-INS Orientation</b></font size>\n    <font size=1>autoware_sensing_msgs/GnssInsOrientationStamped</font size>\"):::cls_drv\n    msg_gnssvel(\"<font size=2><b>GNSS Velocity</b></font size>\n    <font size=1>geometry_msgs/TwistWithCovarianceStamped</font size>\"):::cls_drv\n    msg_gnssacc(\"<font size=2><b>GNSS Acceleration</b></font size>\n    <font size=1>geometry_msgs/AccelWithCovarianceStamped</font size>\"):::cls_drv\n    msg_ult_sen(\"<font size=2><b>Ultrasonics</b></font size>\n    <font size=1>sensor_msgs/Range</font size>\"):::cls_sen\n    msg_img_sen(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_sen\n    msg_pc_combined_rdr(\"<font size=2><b>Combined Radar Tracks</b></font size>\n    <font size=1>radar_msgs/RadarTracks</font size>\"):::cls_sen\n    msg_pc_rdr(\"<font size=2><b>Radar Pointcloud</b></font size>\n    <font size=1>radar_msgs/RadarScan</font size>\"):::cls_sen\n    msg_pc_combined_ldr(\"<font size=2><b>Combined Lidar Point Cloud</b></font 
size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_sen\n    msg_pose_gnss(\"<font size=2><b>GNSS-INS Pose</b></font size>\n    <font size=1>geometry_msgs/PoseWithCovarianceStamped</font size>\"):::cls_sen\n    msg_gnssori_sen(\"<font size=2><b>GNSS-INS Orientation</b></font size>\n    <font size=1>sensor_msgs/Imu</font size>\"):::cls_sen\n    msg_gnssvel_sen(\"<font size=2><b>GNSS Velocity</b></font size>\n    <font size=1>geometry_msgs/TwistWithCovarianceStamped</font size>\"):::cls_sen\n    msg_gnssacc_sen(\"<font size=2><b>GNSS-INS Acceleration</b></font size>\n    <font size=1>geometry_msgs/AccelWithCovarianceStamped</font size>\"):::cls_sen\n\n    cmp_drv --> msg_ult --> cmp_sen\n    cmp_drv --> msg_img --> cmp_sen\n    cmp_drv --> msg_rdr_t --> cmp_sen\n    cmp_drv --> msg_rdr_s --> cmp_sen\n    cmp_drv --> msg_ldr --> cmp_sen\n    cmp_drv --> msg_gnss --> cmp_sen\n    cmp_drv --> msg_gnssori --> cmp_sen\n    cmp_drv --> msg_gnssvel --> cmp_sen\n    cmp_drv --> msg_gnssacc --> cmp_sen\n\n    cmp_sen --> msg_ult_sen\n    cmp_sen --> msg_img_sen\n    cmp_sen --> msg_gnssori_sen\n    cmp_sen --> msg_gnssvel_sen\n    cmp_sen --> msg_pc_combined_rdr\n    cmp_sen --> msg_pc_rdr\n    cmp_sen --> msg_pc_combined_ldr\n    cmp_sen --> msg_pose_gnss\n    cmp_sen --> msg_gnssacc_sen\n    msg_ult_sen --> cmp_per\n    msg_img_sen --> cmp_per\n    msg_pc_combined_rdr --> cmp_per\n    msg_pc_rdr --> cmp_per\n    msg_pc_combined_ldr --> cmp_per\n    msg_pc_combined_ldr --> cmp_loc\n    msg_pose_gnss --> cmp_loc\n    msg_gnssori_sen --> cmp_loc\n    msg_gnssvel_sen --> cmp_loc\n    msg_gnssacc_sen --> cmp_loc\nclassDef cls_drv fill:#F8CECC,stroke:#999,stroke-width:1px;\nclassDef cls_loc fill:#D5E8D4,stroke:#999,stroke-width:1px;\nclassDef cls_per fill:#FFF2CC,stroke:#999,stroke-width:1px;\nclassDef cls_sen fill:#FFE6CC,stroke:#999,stroke-width:1px;
    "},{"location":"design/autoware-interfaces/components/sensing/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/sensing/#ultrasonics","title":"Ultrasonics","text":"

    Distance data from ultrasonic radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#camera-image","title":"Camera Image","text":"

    Image data from camera driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-tracks","title":"Radar Tracks","text":"

    Tracks from radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-scan","title":"Radar Scan","text":"

    Scan from radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#lidar-point-cloud","title":"Lidar Point Cloud","text":"

    Pointcloud from lidar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-position","title":"GNSS-INS Position","text":"

    Initial pose from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-orientation","title":"GNSS-INS Orientation","text":"

    Initial orientation from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-velocity","title":"GNSS Velocity","text":"

    Initial velocity from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-acceleration","title":"GNSS Acceleration","text":"

    Initial acceleration from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/sensing/#ultrasonics_1","title":"Ultrasonics","text":"

    Distance data from ultrasonic radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#camera-image_1","title":"Camera Image","text":"

    Image data from camera. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#combined-radar-tracks","title":"Combined Radar Tracks","text":"

    Radar tracks from radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-point-cloud","title":"Radar Point Cloud","text":"

    Pointcloud from radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#combined-lidar-point-cloud","title":"Combined Lidar Point Cloud","text":"

    Lidar pointcloud after preprocessing. Used by the Perception and Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-pose","title":"GNSS-INS pose","text":"

    Initial pose of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-orientation_1","title":"GNSS-INS Orientation","text":"

    Orientation info from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-velocity_1","title":"GNSS velocity","text":"

    Velocity of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-acceleration_1","title":"GNSS Acceleration","text":"

    Acceleration of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-dimensions","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-axes-and-base_link","title":"Vehicle axes and base_link","text":"

    The base_link frame is used very frequently throughout the Autoware stack, and is a projection of the rear-axle center onto the ground surface.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-dimensions_1","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheelbase","title":"wheelbase","text":"

    The distance between front and rear axles.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#track_width","title":"track_width","text":"

    The distance between left and right wheels.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#overhangs","title":"Overhangs","text":"

    Overhangs are part of the minimum safety box calculation.

    When measuring overhangs, side mirrors, protruding sensors and wheels should be taken into consideration.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#left_overhang","title":"left_overhang","text":"

    The distance between the axis centers of the left wheels and the left-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#right_overhang","title":"right_overhang","text":"

    The distance between the axis centers of the right wheels and the right-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#front_overhang","title":"front_overhang","text":"

    The distance between the front axle and the foremost point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#rear_overhang","title":"rear_overhang","text":"

    The distance between the rear axle and the rear-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle_length","title":"vehicle_length","text":"

    Total length of the vehicle. Calculated by front_overhang + wheelbase + rear_overhang

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle_width","title":"vehicle_width","text":"

    Total width of the vehicle. Calculated by left_overhang + track_width + right_overhang

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel-parameters","title":"Wheel parameters","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel_width","title":"wheel_width","text":"

    The lateral width of a wheel tire, primarily used for dead reckoning.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel_radius","title":"wheel_radius","text":"

    The radius of the wheel, primarily used for dead reckoning.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#polygon_footprint","title":"polygon_footprint","text":"

    The polygon defines the minimum collision area for the vehicle.

    The points should be ordered clockwise, with the origin on the base_link.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel-orientations","title":"Wheel orientations","text":"

    If the vehicle is going forward, a positive wheel angle will result in the vehicle turning left.

    Autoware assumes the rear wheels don't turn about the z-axis.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#notice","title":"Notice","text":"

    The vehicle used in the illustrations was created by xvlblo22 and is from https://www.turbosquid.com/3d-models/modular-sedan-3d-model-1590886.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/","title":"Vehicle Interface","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-interface","title":"Vehicle Interface","text":"

    The Vehicle Interface receives the Vehicle Signal Commands and Vehicle Control Commands and publishes the vehicle status. It also communicates with vehicle by the vehicle-specific protocol.

    The Gate switches between multiple Vehicle Control Commands. These signals include the autonomous driving command, joystick, remote control, and emergency operation, etc. The Adapter converts the generalized control command (target steering, steering rate, velocity, acceleration, jerk) into vehicle-specific control values (steering-torque, wheel-torque, voltage, pressure, accel pedal position, etc).

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#error-status","title":"Error status","text":"

    (See Inputs of Planning.)

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-control-command","title":"Vehicle Control Command","text":"

    (See Output of Control.)

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-signals-commands","title":"Vehicle Signals Commands","text":"

    Commands for various elements of the vehicle unrelated to motion. Published by the Planning module.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-signal-reports","title":"Vehicle Signal Reports","text":"

    Reports for various elements of the vehicle unrelated to motion. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-odometry","title":"Vehicle Odometry","text":"

    Odometry of the vehicle. Used by the Localization module to update the pose of the vehicle in the map.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#steering-status","title":"Steering Status","text":"

    Steering of the ego vehicle. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#actuation-status","title":"Actuation Status","text":"

    Actuation status of the ego vehicle for acceleration, steering, and brake. This represents the reported physical efforts exerted by the vehicle actuators. Published by the Vehicle Interface.

    The message definition is under discussion.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#actuation-command","title":"Actuation Command","text":"

    Actuation command sent to the ego vehicle. This represents the requested physical efforts to be exerted by the vehicle actuators. Published by the Vehicle Interface as generated by the adapter.

    The message definition is under discussion.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-communication","title":"Vehicle Communication","text":"

    Vehicle specific messages protocol like CAN (Controller Area Network).

    "},{"location":"design/configuration-management/","title":"Configuration management","text":""},{"location":"design/configuration-management/#configuration-management","title":"Configuration management","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/development-process/","title":"Development process","text":""},{"location":"design/configuration-management/development-process/#development-process","title":"Development process","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/release-process/","title":"Release process","text":""},{"location":"design/configuration-management/release-process/#release-process","title":"Release process","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/repository-structure/","title":"Repository structure","text":""},{"location":"design/configuration-management/repository-structure/#repository-structure","title":"Repository structure","text":"

    Warning

    Under Construction

    "},{"location":"how-to-guides/","title":"How-to guides","text":""},{"location":"how-to-guides/#how-to-guides","title":"How-to guides","text":""},{"location":"how-to-guides/#integrating-autoware","title":"Integrating Autoware","text":""},{"location":"how-to-guides/#training-machine-learning-models","title":"Training Machine Learning Models","text":""},{"location":"how-to-guides/#others","title":"Others","text":"

    TODO: Write the following contents.

    "},{"location":"how-to-guides/integrating-autoware/overview/","title":"Overview","text":""},{"location":"how-to-guides/integrating-autoware/overview/#overview","title":"Overview","text":""},{"location":"how-to-guides/integrating-autoware/overview/#requirement-prepare-your-real-vehicle-hardware","title":"Requirement: prepare your real vehicle hardware","text":"

    Prerequisites for the vehicle:

    "},{"location":"how-to-guides/integrating-autoware/overview/#1-creating-your-autoware-meta-repository","title":"1. Creating your Autoware meta-repository","text":"

    Create your Autoware meta-repository. One easy way is to fork autowarefoundation/autoware and clone it. For how to fork a repository, refer to GitHub Docs.

    git clone https://github.com/YOUR_NAME/autoware.git\n

    If you set up multiple types of vehicles, adding a suffix like \"autoware.vehicle_A\" or \"autoware.vehicle_B\" is recommended.

    "},{"location":"how-to-guides/integrating-autoware/overview/#2-creating-the-your-vehicle-and-sensor-description","title":"2. Creating the your vehicle and sensor description","text":"

    Next, you need to create description packages that define the vehicle and sensor configuration of your vehicle.

    Create the following two packages:

    Once created, you need to update the autoware.repos file of your cloned Autoware repository to refer to these two description packages.

    -  # sensor_kit\n-  sensor_kit/sample_sensor_kit_launch:\n-    type: git\n-    url: https://github.com/autowarefoundation/sample_sensor_kit_launch.git\n-    version: main\n-  # vehicle\n-  vehicle/sample_vehicle_launch:\n-    type: git\n-    url: https://github.com/autowarefoundation/sample_vehicle_launch.git\n-    version: main\n+  # sensor_kit\n+  sensor_kit/YOUR_SENSOR_KIT_launch:\n+    type: git\n+    url: https://github.com/YOUR_NAME/YOUR_SENSOR_KIT_launch.git\n+    version: main\n+  # vehicle\n+  vehicle/YOUR_VEHICLE_launch:\n+    type: git\n+    url: https://github.com/YOUR_NAME/YOUR_VEHICLE_launch.git\n+    version: main\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#adapt-your_vehicle_launch-for-autoware-launching-system","title":"Adapt YOUR_VEHICLE_launch for autoware launching system","text":""},{"location":"how-to-guides/integrating-autoware/overview/#at-your_vehicle_description","title":"At YOUR_VEHICLE_description","text":"

    Define URDF and parameters in the vehicle description package (refer to the sample vehicle description package for an example).

    "},{"location":"how-to-guides/integrating-autoware/overview/#at-your_vehicle_launch","title":"At YOUR_VEHICLE_launch","text":"

    Create a launch file (refer to the sample vehicle launch package for example). If you have multiple vehicles with the same hardware setup, you can specify vehicle_id to distinguish them.

    "},{"location":"how-to-guides/integrating-autoware/overview/#adapt-your_sensor_kit_description-for-autoware-launching-system","title":"Adapt YOUR_SENSOR_KIT_description for autoware launching system","text":""},{"location":"how-to-guides/integrating-autoware/overview/#at-your_sensor_kit_description","title":"At YOUR_SENSOR_KIT_description","text":"

    Define URDF and extrinsic parameters for all the sensors here (refer to the sample sensor kit description package for example). Note that you need to calibrate extrinsic parameters for all the sensors beforehand.

    "},{"location":"how-to-guides/integrating-autoware/overview/#at-your_sensor_kit_launch","title":"At YOUR_SENSOR_KIT_launch","text":"

    Create launch/sensing.launch.xml that launches the interfaces of all the sensors on the vehicle. (refer to the sample sensor kit launch package for example).

    Note

    At this point, you are now able to run Autoware's Planning Simulator to do a basic test of your vehicle and sensing packages. To do so, you need to build and install Autoware using your cloned repository. Follow the steps for either Docker or source installation (starting from the dependency installation step) and then run the following command:

    ros2 launch autoware_launch planning_simulator.launch.xml vehicle_model:=YOUR_VEHICLE sensor_kit:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#3-create-a-vehicle_interface-package","title":"3. Create a vehicle_interface package","text":"

    You need to create an interface package for your vehicle. The package is expected to provide the following two functions.

    1. Receive command messages from vehicle_cmd_gate and drive the vehicle accordingly
    2. Send vehicle status information to Autoware

    You can find detailed information about the requirements of the vehicle_interface package in the Vehicle Interface design documentation. You can also refer to TIER IV's pacmod_interface repository as an example of a vehicle interface package.

    "},{"location":"how-to-guides/integrating-autoware/overview/#4-create-maps","title":"4. Create maps","text":"

    You need both a pointcloud map and a vector map in order to use Autoware. For more information on map design, please click here.

    "},{"location":"how-to-guides/integrating-autoware/overview/#create-a-pointcloud-map","title":"Create a pointcloud map","text":"

    Use third-party tools such as a LiDAR-based SLAM (Simultaneous Localization And Mapping) package to create a pointcloud map in the .pcd format. For more information, please click here.

    "},{"location":"how-to-guides/integrating-autoware/overview/#create-vector-map","title":"Create vector map","text":"

    Use third-party tools such as TIER IV's Vector Map Builder to create a Lanelet2 format .osm file.

    "},{"location":"how-to-guides/integrating-autoware/overview/#5-launch-autoware","title":"5. Launch Autoware","text":"

    This section briefly explains how to run your vehicle with Autoware.

    "},{"location":"how-to-guides/integrating-autoware/overview/#install-autoware","title":"Install Autoware","text":"

    Follow the installation steps of Autoware.

    "},{"location":"how-to-guides/integrating-autoware/overview/#launch-autoware","title":"Launch Autoware","text":"

    Launch Autoware with the following command:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_kit:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#set-initial-pose","title":"Set initial pose","text":"

    If GNSS is available, Autoware automatically initializes the vehicle's pose.

    If not, you need to set the initial pose using the RViz GUI.

    1. Click the 2D Pose estimate button in the toolbar, or hit the P key
    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the initial pose.
    "},{"location":"how-to-guides/integrating-autoware/overview/#set-goal-pose","title":"Set goal pose","text":"

    Set a goal pose for the ego vehicle.

    1. Click the 2D Nav Goal button in the toolbar, or hit the G key
    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the goal pose. If successful, you will see the calculated planning path on RViz.
    "},{"location":"how-to-guides/integrating-autoware/overview/#engage","title":"Engage","text":"

    In your terminal, execute the following command.

    source ~/autoware.YOURS/install/setup.bash\nros2 topic pub /autoware.YOURS/engage autoware_auto_vehicle_msgs/msg/Engage \"engage: true\" -1\n

    You can also engage via RViz with \"AutowareStatePanel\". The panel can be found in Panels > Add New Panel > tier4_state_rviz_plugin > AutowareStatePanel.

    Now the vehicle should drive along the calculated path!

    "},{"location":"how-to-guides/integrating-autoware/overview/#6-tune-parameters-for-your-vehicle-environment","title":"6. Tune parameters for your vehicle & environment","text":"

    You may need to tune your parameters depending on the domain in which you will operate your vehicle.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/","title":"Creating maps","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-maps","title":"Creating maps","text":"

    Autoware requires a pointcloud map and a vector map for the vehicle's operating environment. (Check the map design documentation page for the detailed specification).

    This page explains how users can create maps that can be used for Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-a-point-cloud-map","title":"Creating a point cloud map","text":"

    Traditionally, a Mobile Mapping System (MMS) is used in order to create highly accurate large-scale point cloud maps. However, since an MMS requires high-end sensors for precise positioning, its operational cost can be very expensive and may not be suitable for a relatively small driving environment. Alternatively, a Simultaneous Localization And Mapping (SLAM) algorithm can be used to create a point cloud map from recorded LiDAR scans. Some of the useful open-source SLAM implementations are listed on this page.

    If you prefer proprietary software that is easy to use, you can try a fully automatic mapping tool from MAP IV, Inc., MapIV Engine. They currently provide a trial license for Autoware users free of charge.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-a-vector-map","title":"Creating a vector map","text":"

    The easiest way to create an Autoware-compatible vector map is to use Vector Map Builder, a free web-based tool provided by TIER IV, Inc.. Vector Map Builder allows you to create lanes and add additional regulatory elements such as stop signs or traffic lights using a point cloud map as a reference.

    For open-source software options, MapToolbox is a plugin for Unity specifically designed to create Lanelet2 maps for Autoware. Although JOSM is another open-source tool that can be used to create Lanelet2 maps, be aware that a number of modifications must be done manually to make the map compatible with Autoware. This process can be tedious and time-consuming, so the use of JOSM is not recommended.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#autoware-compatible-map-providers","title":"Autoware-compatible map providers","text":"

    If it is not possible to create HD maps yourself, you can use a mapping service from the following Autoware-compatible map providers instead:

    The table below shows each company's mapping technology and the types of HD maps they support.

    Company Mapping technology Available maps MAP IV, Inc. SLAM Point cloud and vector maps AISAN TECHNOLOGY CO., LTD. MMS Point cloud and vector maps TomTom MMS Vector map*

    Note

    Maps provided by TomTom use their proprietary AutoStream format, not Lanelet2. The open-source AutoStreamForAutoware tool can be used to convert an AutoStream map to a Lanelet2 map. However, the converter is still in its early stages and has some known limitations.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/","title":"Converting UTM maps to MGRS map format","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#converting-utm-maps-to-mgrs-map-format","title":"Converting UTM maps to MGRS map format","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#overview","title":"Overview","text":"

    If you want to use MGRS (Military Grid Reference System) format in Autoware, you need to convert UTM (Universal Transverse Mercator) map to MGRS format. In order to do that, we will use UTM to MGRS pointcloud converter ROS 2 package provided by Leo Drive.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#installation","title":"Installation","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#dependencies","title":"Dependencies","text":"

    To install dependencies:

    sudo apt install ros-humble-pcl-conversions \\\ngeographiclib-tools\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#building","title":"Building","text":"
        cd <PATH-TO-YOUR-ROS-2-WORKSPACE>/src\n    git clone https://github.com/leo-drive/pc_utm_to_mgrs_converter.git\n    cd ..\n    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#usage","title":"Usage","text":"

    After the installation of converter tool, we need to define northing, easting and ellipsoid height of local UTM map origin in pc_utm_to_mgrs_converter.param.yaml. For example, you can use latitude, longitude and altitude values in the navsatfix message from your GNSS/INS sensor.

    Sample ROS 2 topic echo from navsatfix message
    header:\nstamp:\nsec: 1694612439\nnanosec: 400000000\nframe_id: GNSS_INS/gnss_ins_link\nstatus:\nstatus: 0\nservice: 1\nlatitude: 41.0216110801253\nlongitude: 28.887096461148346\naltitude: 74.28264078891529\nposition_covariance:\n- 0.0014575386885553598\n- 0.0\n- 0.0\n- 0.0\n- 0.004014162812381983\n- 0.0\n- 0.0\n- 0.0\n- 0.0039727711118757725\nposition_covariance_type: 2\n

    After that, you need to convert the latitude and longitude values to northing and easting values. You can use any converter on the internet to convert latitude and longitude values to UTM (e.g., UTMconverter).

    Now, we are ready to update pc_utm_to_mgrs_converter.param.yaml, example for our navsatfix message:

    /**:\n  ros__parameters:\n      # Northing of local origin\n-     Northing: 4520550.0\n+     Northing: 4542871.33\n\n     # Easting of local origin\n-     Easting: 698891.0\n+     Easting: 658659.84\n\n     # Elipsoid Height of local origin\n-     ElipsoidHeight: 47.62\n+     ElipsoidHeight: 74.28\n

    Lastly, we will update the input and output pointcloud map paths in pc_utm_to_mgrs_converter.launch.xml:

    ...\n- <arg name=\"input_file_path\" default=\"/home/melike/projects/autoware_data/gebze_pospac_map/pointcloud_map.pcd\"/>\n+ <arg name=\"input_file_path\" default=\"<PATH-TO-YOUR-INPUT-PCD-MAP>\"/>\n- <arg name=\"output_file_path\" default=\"/home/melike/projects/autoware_data/gebze_pospac_map/pointcloud_map_mgrs_orto.pcd\"/>\n+ <arg name=\"output_file_path\" default=\"<PATH-TO-YOUR-OUTPUT-PCD-MAP>\"/>\n...\n

    After the setting of the package, we will launch pc_utm_to_mgrs_converter:

    ros2 launch pc_utm_to_mgrs_converter pc_utm_to_mgrs_converter.launch.xml\n

    The conversion process will start, and you should see a Saved <YOUR-MAP-POINTS-SIZE> data points saved to <YOUR-OUTPUT-MAP-PATH> message on your terminal. The MGRS-format pointcloud map is then saved in your output map directory.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/","title":"Creating a vector map","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#creating-a-vector-map","title":"Creating a vector map","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#overview","title":"Overview","text":"

    In this section, we will explain how to create Lanelet2 maps with TIER IV's vector map builder tool.

    If you want to look at another method, MapToolbox is a plugin for Unity specifically designed to create Lanelet2 maps for Autoware. We do not recommend JOSM, since it requires manual modifications for Autoware usage, which can make the process tedious and time-consuming.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#vector-map-builder","title":"Vector Map Builder","text":"

    You need a TIER IV account to use the Vector Map Builder tool. If you do not have one yet, please create a TIER IV account in order to use the Vector Map Builder tool.

    You can follow these pages for creating a Lanelet2 map and its regulatory elements.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/","title":"Crosswalk attribute","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#crosswalk-attribute","title":"Crosswalk attribute","text":"

    Behavior velocity planner's crosswalk module plans velocity to stop or decelerate for pedestrians approaching or walking on a crosswalk. To enable this module, we will add a crosswalk attribute to our Lanelet2 map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#creating-a-crosswalk-attribute","title":"Creating a crosswalk attribute","text":"

    In order to create a crosswalk on your map, please follow these steps:

    1. Click Abstraction button on top panel.
    2. Select Crosswalk from the panel.
    3. Click and draw crosswalk on your pointcloud map.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#testing-created-crosswalk-with-planning-simulator","title":"Testing created crosswalk with planning simulator","text":"

    After completing the map, we need to save it. To do that, please click File --> Export Lanelet2Maps, then download it.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    + <YOUR-MAP-DIRECTORY>/\n+  \u251c\u2500 pointcloud_map.pcd\n+  \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    3. We need to add pedestrians to crosswalk, so activate interactive pedestrians from Tool Properties panel on rviz.
    4. After that, please press Shift and right-click to insert pedestrians.
    5. You can move an inserted pedestrian by dragging it with the right mouse button.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/","title":"Creating a Lanelet","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-lanelet","title":"Creating a Lanelet","text":"

    On this page, we will explain how to create a simple lanelet on your pointcloud map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-lanelet2","title":"Creating a Lanelet2","text":"

    Firstly, we need to import our pointcloud map to vector map builder tool:

    1. Please click File.
    2. Then, click Import PCD.
    3. Click Browse and select your .pcd file.

    The point cloud will be displayed in the Vector Map Builder tool after the upload is complete:

    Uploaded pointcloud map file on vector map builder

    Now, we are ready to create lanelet2 map on our pointcloud map:

    1. Please click Create.
    2. Then, click Create Lanelet2Maps.
    3. Please fill your map name
    4. Please fill your MGRS zone. (At tutorial_vehicle, MGRS grid zone: 35T - MGRS 100,000-meter square: PF)
    5. Click Create.
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-simple-lanelet","title":"Creating a simple lanelet","text":"

    In order to create a simple lanelet on your map, please follow these steps:

    1. Click Lanelet2Maps on the bar
    2. Enable Lanelet mode via selecting Lanelet.
    3. Then, you can click the pointcloud map to create lanelet.
    4. If your lanelet is finished, you can disable Lanelet.
    5. If you want to change your lanelet width, click lanelet --> Change Lanelet Width, then you can enter the lanelet width.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#join-two-lanelets","title":"Join two lanelets","text":"

    In order to join two lanelets, please follow these steps:

    1. Please create two distinct lanelets.
    2. Select a Lanelet, then press Shift and select other lanelet.
    3. Now, you can see Join Lanelets button, just press it.
    4. These lanelets will be joined.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#join-multiple-lanelets","title":"Join Multiple lanelets","text":"

    In order to add (join) two or more lanelets to another lanelet, please follow these steps:

    1. Create multiple lanelets.
    2. You can join the first two lanelets like the steps before.
    3. Please check the endpoint IDs of the first lanelet.
    4. Then, replace these IDs with the third lanelet's start point IDs. (Do this by selecting the lanelet's linestring.)
    5. You will see that two next lanes of the first lanelet appear.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#change-speed-limit-of-lanelet","title":"Change Speed Limit Of Lanelet","text":"

    In order to change the speed limit of lanelet, please follow these steps:

    1. Select the lanelet where the speed limit will be changed
    2. Set speed limit on the right panel.
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#test-lanelets-with-planning-simulator","title":"Test lanelets with planning simulator","text":"

    After completing the lanelets, we need to save them. To do that, please click File --> Export Lanelet2Maps, then download it.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    <YOUR-MAP-DIRECTORY>/\n \u251c\u2500 pointcloud_map.pcd\n \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.

    Testing our created vector map with planning simulator"},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/","title":"Stop Line","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#stop-line","title":"Stop Line","text":"

    Behavior velocity planner's stop line module plans velocity to stop right before stop lines and restart driving after stopping. To enable this module, we will add a stop line attribute to our Lanelet2 map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#creating-a-stop-line-regulatory-element","title":"Creating a stop line regulatory element","text":"

    In order to create a stop line on your pointcloud map, please follow these steps:

    1. Please select lanelet to add stop line.
    2. Click Abstraction button on top panel.
    3. Select Stop Line from the panel.
    4. Click on the desired area for inserting stop line.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#testing-created-the-stop-line-element-with-planning-simulator","title":"Testing created the stop line element with planning simulator","text":"

    After completing the map, we need to save it. To do that, please click File --> Export Lanelet2Maps, then download it.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    + <YOUR-MAP-DIRECTORY>/\n+  \u251c\u2500 pointcloud_map.pcd\n+  \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    3. You can see the stop line marker on the rviz screen.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/","title":"Available Open Source SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#available-open-source-slam","title":"Available Open Source SLAM","text":"

    This page provides a list of available open source Simultaneous Localization And Mapping (SLAM) implementations that can be used to generate a point cloud (.pcd) map file.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#selecting-which-implementation-to-use","title":"Selecting which implementation to use","text":"

    Lidar odometry drifts accumulatively over time, and there are solutions to mitigate that problem, such as graph optimization, loop closure and using a GPS sensor to decrease accumulated drift error. Because of that, a SLAM algorithm should ideally provide a loop closure feature and graph optimization, and should be able to use a GPS sensor. Additionally, some of the algorithms use an IMU sensor to add another factor to the graph to decrease drift error. While some algorithms strictly require a 9-axis IMU sensor, others require only a 6-axis IMU sensor or do not use an IMU at all. Before choosing an algorithm to create maps for Autoware, please consider these factors depending on your sensor setup and the expected quality of the generated map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#tips","title":"Tips","text":"

    Commonly used open-source SLAM implementations are lidarslam-ros2 (LiDAR, IMU*) and LIO-SAM (LiDAR, IMU, GNSS). The required sensor data for each algorithm is specified in the parentheses, where an asterisk (*) indicates that such sensor data is optional. For supported LiDAR models, please check the GitHub repository of each algorithm. While these ROS 2-based SLAM implementations can be easily installed and used directly on the same machine that runs Autoware, it is important to note that they may not be as well-tested or as mature as ROS 1-based alternatives.

    The notable open-source SLAM implementations that are based on ROS 1 include hdl-graph-slam (LiDAR, IMU*, GNSS*), LeGO-LOAM (LiDAR, IMU*), LeGO-LOAM-BOR (LiDAR), and LIO-SAM (LiDAR, IMU, GNSS).

    Most of these algorithms already have a built-in loop-closure and pose graph optimization. However, if the built-in, automatic loop-closure fails or does not work correctly, you can use Interactive SLAM to adjust and optimize a pose graph manually.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#list-of-third-party-slam-implementations","title":"List of Third Party SLAM Implementations","text":"Package Name Explanation Repository Link Loop Closure Sensors ROS Version Dependencies FAST-LIO-LC A computationally efficient and robust LiDAR-inertial odometry package with loop closure module and graph optimization https://github.com/yanliang-wang/FAST_LIO_LC &check; LidarIMUGPS [Optional] ROS 1 ROS MelodicPCL >= 1.8Eigen >= 3.3.4GTSAM >= 4.0.0 FAST_LIO_SLAM FAST_LIO_SLAM is the integration of FAST_LIO and SC-PGO which is scan context based loop detection and GTSAM based pose-graph optimization https://github.com/gisbi-kim/FAST_LIO_SLAM &check; LidarIMUGPS [Optional] ROS 1 PCL >= 1.8Eigen >= 3.3.4 FD-SLAM FD_SLAM is Feature&Distribution-based 3D LiDAR SLAM method based on Surface Representation Refinement. In this algorithm novel feature-based Lidar odometry used for fast scan-matching, and used a proposed UGICP method for keyframe matching https://github.com/SLAMWang/FD-SLAM &check; LidarIMU [Optional]GPS ROS 1 PCLg2oSuitesparse hdl_graph_slam An open source ROS package for real-time 6DOF SLAM using a 3D LIDAR. It is based on 3D Graph SLAM with NDT scan matching-based odometry estimation and loop detection. 
It also supports several graph constraints, such as GPS, IMU acceleration (gravity vector), IMU orientation (magnetic sensor), and floor plane (detected in a point cloud) https://github.com/koide3/hdl_graph_slam &check; LidarIMU [Optional]GPS [Optional] ROS 1 PCLg2oOpenMP IA-LIO-SAM IA_LIO_SLAM is created for data acquisition in unstructured environment and it is a framework for Intensity and Ambient Enhanced Lidar Inertial Odometry via Smoothing and Mapping that achieves highly accurate robot trajectories and mapping https://github.com/minwoo0611/IA_LIO_SAM &check; LidarIMUGPS ROS 1 GTSAM ISCLOAM ISCLOAM presents a robust loop closure detection approach by integrating both geometry and intensity information https://github.com/wh200720041/iscloam &check; Lidar ROS 1 Ubuntu 18.04ROS MelodicCeresPCLGTSAMOpenCV LeGO-LOAM-BOR LeGO-LOAM-BOR is improved version of the LeGO-LOAM by improving quality of the code, making it more readable and consistent. Also, performance is improved by converting processes to multi-threaded approach https://github.com/facontidavide/LeGO-LOAM-BOR &check; LidarIMU ROS 1 ROS MelodicPCLGTSAM LIO_SAM A framework that achieves highly accurate, real-time mobile robot trajectory estimation and map-building. It formulates lidar-inertial odometry atop a factor graph, allowing a multitude of relative and absolute measurements, including loop closures, to be incorporated from different sources as factors into the system https://github.com/TixiaoShan/LIO-SAM &check; LidarIMUGPS [Optional] ROS 1ROS 2 PCLGTSAM Optimized-SC-F-LOAM An improved version of F-LOAM and uses an adaptive threshold to further judge the loop closure detection results and reducing false loop closure detections. 
Also it uses feature point-based matching to calculate the constraints between a pair of loop closure frame point clouds and decreases time consumption of constructing loop frame constraints https://github.com/SlamCabbage/Optimized-SC-F-LOAM &check; Lidar ROS 1 PCLGTSAMCeres SC-A-LOAM A real-time LiDAR SLAM package that integrates A-LOAM and ScanContext. https://github.com/gisbi-kim/SC-A-LOAM &check; Lidar ROS 1 GTSAM >= 4.0 SC-LeGO-LOAM SC-LeGO-LOAM integrated LeGO-LOAM for lidar odometry and 2 different loop closure methods: ScanContext and Radius search based loop closure. While ScanContext is correcting large drifts, radius search based method is good for fine-stitching https://github.com/irapkaist/SC-LeGO-LOAM &check; LidarIMU ROS 1 PCLGTSAM"},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/","title":"FAST_LIO_LC","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#fast_lio_lc","title":"FAST_LIO_LC","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#what-is-fast_lio_lc","title":"What is FAST_LIO_LC?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/yanliang-wang/FAST_LIO_LC

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#dependencies","title":"Dependencies","text":"
      wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\n  cd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\n  cd ~/Downloads/gtsam-4.0.0-alpha2/\n  mkdir build && cd build\n  cmake ..\n  sudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#1-build","title":"1) Build","text":"
        mkdir -p ~/ws_fastlio_lc/src\n    cd ~/ws_fastlio_lc/src\n    git clone https://github.com/gisbi-kim/FAST_LIO_SLAM.git\n    git clone https://github.com/Livox-SDK/livox_ros_driver\n    cd ..\n    catkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#2-set-parameters","title":"2) Set parameters","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#3-run","title":"3) Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#other-examples","title":"Other Examples","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#example-dataset","title":"Example dataset","text":"

    Check original repository link for example dataset.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/","title":"FAST_LIO_SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#fast_lio_slam","title":"FAST_LIO_SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#what-is-fast_lio_slam","title":"What is FAST_LIO_SLAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/gisbi-kim/FAST_LIO_SLAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#1-build","title":"1) Build","text":"
        mkdir -p ~/catkin_fastlio_slam/src\n    cd ~/catkin_fastlio_slam/src\n    git clone https://github.com/gisbi-kim/FAST_LIO_SLAM.git\n    git clone https://github.com/Livox-SDK/livox_ros_driver\n    cd ..\n    catkin_make\n    source devel/setup.bash\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#3-run","title":"3) Run","text":"
        # terminal 1: run FAST-LIO2\nroslaunch fast_lio mapping_ouster64.launch\n\n    # open the other terminal tab: run SC-PGO\ncd ~/catkin_fastlio_slam\n    source devel/setup.bash\n    roslaunch aloam_velodyne fastlio_ouster64.launch\n\n    # play bag file in the other terminal\nrosbag play xxx.bag -- clock --pause\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#other-examples","title":"Other Examples","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/","title":"FD-SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#fd-slam","title":"FD-SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#what-is-fd-slam","title":"What is FD-SLAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#repository-information","title":"Repository Information","text":"

    This is an open source ROS package for real-time 6DOF SLAM using a 3D LIDAR.

    It is based on hdl_graph_slam and the steps to run our system are same with hdl-graph-slam.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/SLAMWang/FD-SLAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#dependencies","title":"Dependencies","text":"

    The following ROS packages are required:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/SLAMWang/FD-SLAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#2-services","title":"2) Services","text":"
    /hdl_graph_slam/dump  (hdl_graph_slam/DumpGraph)\n- save all the internal data (point clouds, floor coeffs, odoms, and pose graph) to a directory.\n\n/hdl_graph_slam/save_map (hdl_graph_slam/SaveMap)\n- save the generated map as a PCD file.\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#3-set-parameters","title":"3) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#4-run","title":"4) Run","text":"
    source devel/setup.bash\nroslaunch hdl_graph_slam hdl_graph_slam_400_ours.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/","title":"hdl_graph_slam","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#hdl_graph_slam","title":"hdl_graph_slam","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#what-is-hdl_graph_slam","title":"What is hdl_graph_slam?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/koide3/hdl_graph_slam

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#dependencies","title":"Dependencies","text":"

    The following ROS packages are required:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#1-build","title":"1) Build","text":"
    # for melodic\nsudo apt-get install ros-melodic-geodesy ros-melodic-pcl-ros ros-melodic-nmea-msgs ros-melodic-libg2o\ncd catkin_ws/src\ngit clone https://github.com/koide3/ndt_omp.git -b melodic\ngit clone https://github.com/SMRT-AIST/fast_gicp.git --recursive\ngit clone https://github.com/koide3/hdl_graph_slam\n\ncd .. && catkin_make -DCMAKE_BUILD_TYPE=Release\n\n# for noetic\nsudo apt-get install ros-noetic-geodesy ros-noetic-pcl-ros ros-noetic-nmea-msgs ros-noetic-libg2o\n\ncd catkin_ws/src\ngit clone https://github.com/koide3/ndt_omp.git\ngit clone https://github.com/SMRT-AIST/fast_gicp.git --recursive\ngit clone https://github.com/koide3/hdl_graph_slam\n\ncd .. && catkin_make -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#2-set-parameter","title":"2) Set parameter","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#3-run","title":"3) Run","text":"
    rosparam set use_sim_time true\nroslaunch hdl_graph_slam hdl_graph_slam_400.launch\n
    roscd hdl_graph_slam/rviz\nrviz -d hdl_graph_slam.rviz\n
    rosbag play --clock hdl_400.bag\n

    Save the generated map by:

    rosservice call /hdl_graph_slam/save_map \"resolution: 0.05\ndestination: '/full_path_directory/map.pcd'\"\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#example2-outdoor","title":"Example2 (Outdoor)","text":"

    Bag file (recorded in an outdoor environment):

    rosparam set use_sim_time true\nroslaunch hdl_graph_slam hdl_graph_slam_400.launch\n
    roscd hdl_graph_slam/rviz\nrviz -d hdl_graph_slam.rviz\n
    rosbag play --clock dataset.bag\n

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#papers","title":"Papers","text":"

    Kenji Koide, Jun Miura, and Emanuele Menegatti, A Portable 3D LIDAR-based System for Long-term and Wide-area People Behavior Measurement, Advanced Robotic Systems, 2019 [link].

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#contact","title":"Contact","text":"

    Kenji Koide, k.koide@aist.go.jp, https://staff.aist.go.jp/k.koide

    [Active Intelligent Systems Laboratory, Toyohashi University of Technology, Japan] [Mobile Robotics Research Team, National Institute of Advanced Industrial Science and Technology (AIST), Japan]

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/","title":"IA-LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#ia-lio-sam","title":"IA-LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#what-is-ia-lio-sam","title":"What is IA-LIO-SAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/minwoo0611/IA_LIO_SAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#dependencies","title":"Dependencies","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#1-build","title":"1) Build","text":"
        mkdir -p ~/catkin_ia_lio/src\n    cd ~/catkin_ia_lio/src\n    git clone https://github.com/minwoo0611/IA_LIO_SAM\n    cd ..\n    catkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#2-set-parameters","title":"2) Set parameters","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#3-run","title":"3) Run","text":"
      # open new terminal: run IA_LIO\n  source devel/setup.bash\n  roslaunch lio_sam mapping_ouster64.launch\n\n  # play bag file in the other terminal\n  rosbag play RECORDED_BAG.bag --clock\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#sample-dataset-images","title":"Sample dataset images","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#example-dataset","title":"Example dataset","text":"

    Check the original repository link for an example dataset.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#paper","title":"Paper","text":"

    Thank you for citing IA-LIO-SAM (./config/doc/KRS-2021-17.pdf) if you use any of this code.

    Part of the code is adapted from LIO-SAM (IROS-2020).

    @inproceedings{legoloam2018shan,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/","title":"ISCLOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#iscloam","title":"ISCLOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#what-is-iscloam","title":"What is ISCLOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/wh200720041/iscloam

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#dependencies","title":"Dependencies","text":"

    For visualization purposes, this package uses the hector trajectory server; you may install the package by

    sudo apt-get install ros-melodic-hector-trajectory-server\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#build-and-run","title":"Build and Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#1-clone-repository","title":"1. Clone repository","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/wh200720041/iscloam.git\ncd ..\ncatkin_make -j1\nsource ~/catkin_ws/devel/setup.bash\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#2-set-parameter","title":"2. Set Parameter","text":"

    Change the bag location and sensor parameters on launch files.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#3-launch","title":"3. Launch","text":"
    roslaunch iscloam iscloam.launch\n

    if you would like to generate the map of environment at the same time, you can run

    roslaunch iscloam iscloam_mapping.launch\n

    Note that the global map can be very large, so it may take a while to perform global optimization. Some lag is expected between the trajectory and the map since they are running in separate threads. More CPU usage will occur when a loop closure is identified.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#example-result","title":"Example Result","text":"

    Watch demo video at Video Link

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#ground-truth-comparison","title":"Ground Truth Comparison","text":"

    Green: ISCLOAM Red: Ground Truth

                      KITTI sequence 00                                  KITTI sequence 05\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#citation","title":"Citation","text":"

    If you use this work for your research, you may want to cite the paper below; your citation will be appreciated.

    @inproceedings{wang2020intensity,\n  author={H. {Wang} and C. {Wang} and L. {Xie}},\n  booktitle={2020 IEEE International Conference on Robotics and Automation (ICRA)},\n  title={Intensity Scan Context: Coding Intensity and Geometry Relations for Loop Closure Detection},\n  year={2020},\n  volume={},\n  number={},\n  pages={2095-2101},\n  doi={10.1109/ICRA40945.2020.9196764}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#acknowledgements","title":"Acknowledgements","text":"

    Thanks for A-LOAM and LOAM(J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time) and LOAM_NOTED.

    Author: Wang Han, Nanyang Technological University, Singapore

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/","title":"LeGO-LOAM-BOR","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#lego-loam-bor","title":"LeGO-LOAM-BOR","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#what-is-lego-loam-bor","title":"What is LeGO-LOAM-BOR?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/facontidavide/LeGO-LOAM-BOR

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/facontidavide/LeGO-LOAM-BOR.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#3-run","title":"3) Run","text":"
    source devel/setup.bash\nroslaunch lego_loam_bor run.launch rosbag:=/path/to/your/rosbag lidar_topic:=/velodyne_points\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#cite-lego-loam","title":"Cite LeGO-LOAM","text":"

    Thank you for citing our LeGO-LOAM paper if you use any of this code:

    @inproceedings{legoloam2018,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Tixiao Shan and Brendan Englot},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/","title":"LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#lio-sam","title":"LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#what-is-lio-sam","title":"What is LIO-SAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/TixiaoShan/LIO-SAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#required-sensors","title":"Required Sensors","text":"

    *Robosense lidars aren't supported officially, but their Helios series can be used as Velodyne lidars.

    The system architecture of the LIO-SAM method is described in the following diagram; please look at the official repository for more information.

    System Architecture of LIO-SAM

    We are using Robosense Helios 5515 and CLAP B7 sensor on tutorial_vehicle, so we will use these sensors for running LIO-SAM.

    Additionally, LIO-SAM was tested with an Applanix POS LVX and Hesai Pandar XT32 sensor setup. Some additional information about these sensors is provided on this page.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#ros-compatibility","title":"ROS Compatibility","text":"

    Since Autoware uses ROS 2 Humble currently, we will continue with ROS 2 version of LIO-SAM.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#dependencies","title":"Dependencies","text":"

    ROS 2 dependencies:

    To install these dependencies, you can use this bash command in your terminal:

    sudo apt install ros-humble-perception-pcl \\\nros-humble-pcl-msgs \\\nros-humble-vision-opencv \\\nros-humble-xacro\n

    Other dependencies:

    To install the gtsam, you can use this bash command in your terminal:

      # Add GTSAM-PPA\nsudo add-apt-repository ppa:borglab/gtsam-release-4.1\n  sudo apt install libgtsam-dev libgtsam-unstable-dev\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#1-installation","title":"1) Installation","text":"

    In order to use and build LIO-SAM, we will create workspace for LIO-SAM:

        mkdir -p ~/lio-sam-ws/src\n    cd ~/lio-sam-ws/src\n    git clone -b ros2 https://github.com/TixiaoShan/LIO-SAM.git\n    cd ..\n    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#2-settings","title":"2) Settings","text":"

    After building LIO-SAM, we need to record a ROS 2 bag file that includes the necessary topics for LIO-SAM. The necessary topics are described in the LIO-SAM config file.

    ROS 2 Bag example for LIO-SAM with Robosense Helios and CLAP B7
    Files:             map_bag_13_09_0.db3\nBag size:          38.4 GiB\nStorage id:        sqlite3\nDuration:          3295.326s\nStart:             Sep 13 2023 16:40:23.165 (1694612423.165)\nEnd:               Sep 13 2023 17:35:18.492 (1694615718.492)\nMessages:          1627025\nTopic information: Topic: /sensing/gnss/clap/ros/imu | Type: sensor_msgs/msg/Imu | Count: 329535 | Serialization Format: cdr\nTopic: /sensing/gnss/clap/ros/odometry | Type: nav_msgs/msg/Odometry | Count: 329533 | Serialization Format: cdr\nTopic: /sensing/lidar/top/pointcloud_raw | Type: sensor_msgs/msg/PointCloud2 | Count: 32953 | Serialization Format: cdr\n

    Note: We use use_odometry as true at clap_b7_driver for publishing GPS odometry topic from navsatfix.

    Please set the topics and sensor settings in lio_sam/config/params.yaml. Here are some example modifications for our tutorial_vehicle.

    -   pointCloudTopic: \"/points\"\n+   pointCloudTopic: \"/sensing/lidar/top/pointcloud_raw\"\n-   imuTopic: \"/imu/data\"\n+   imuTopic: \"/sensing/gnss/clap/ros/imu\"\n   odomTopic: \"odometry/imu\"\n-   gpsTopic: \"odometry/gpsz\"\n+   gpsTopic: \"/sensing/gnss/clap/ros/odometry\"\n

    Since we will use GPS information with Autoware, we need to enable the useImuHeadingInitialization parameter.

    -   useImuHeadingInitialization: false\n+   useImuHeadingInitialization: true\n-   useGpsElevation: false\n+   useGpsElevation: true\n

    We will update sensor settings also. Since Robosense Lidars aren't officially supported, we will set our 32-channel Robosense Helios 5515 lidar as Velodyne:

    -   sensor: ouster\n+   sensor: velodyne\n-   N_SCAN: 64\n+   N_SCAN: 32\n-   Horizon_SCAN: 512\n+   Horizon_SCAN: 1800\n

    After that, we will update extrinsic transformations between Robosense Lidar and CLAP B7 GNSS/INS (IMU) system.

    -   extrinsicTrans:  [ 0.0,  0.0,  0.0 ]\n+   extrinsicTrans:  [-0.91, 0.0, -1.71]\n-   extrinsicRot:    [-1.0,  0.0,  0.0,\n-                      0.0,  1.0,  0.0,\n-                      0.0,  0.0, -1.0 ]\n+   extrinsicRot:    [1.0,  0.0,  0.0,\n+                     0.0,  1.0,  0.0,\n+                     0.0,  0.0, 1.0 ]\n-   extrinsicRPY: [ 0.0,  1.0,  0.0,\n-                  -1.0,  0.0,  0.0,\n-                   0.0,  0.0,  1.0 ]\n+   extrinsicRPY: [ 1.0,  0.0,  0.0,\n+                   0.0,  1.0,  0.0,\n+                   0.0,  0.0,  1.0 ]\n

    Warning

    The mapping direction follows the direction of travel in the real world. If the LiDAR sensor faces backwards relative to the direction you are moving, then you need to change the extrinsicRot accordingly; otherwise the IMU is interpreted as going in the wrong direction, which may cause problems.

    For example, in our Applanix POS LVX and Hesai Pandar XT32 setup, the IMU faced the direction of travel while the LiDAR had a 180-degree difference in the Z-axis relative to the IMU. In other words, they were facing away from each other. The tool may need an IMU transformation in that case.

    -   extrinsicRot:    [-1.0,  0.0,  0.0,\n-                      0.0,  1.0,  0.0,\n-                      0.0,  0.0, -1.0 ]\n+   extrinsicRot:    [-1.0,  0.0,  0.0,\n+                     0.0,  -1.0,  0.0,\n+                     0.0,   0.0,  1.0 ]\n-   extrinsicRPY: [ 0.0,  1.0,  0.0,\n-                  -1.0,  0.0,  0.0,\n-                   0.0,  0.0,  1.0 ]\n+   extrinsicRPY: [ -1.0,  0.0,  0.0,\n+                    0.0, -1.0,  0.0,\n+                    0.0,  0.0,  1.0 ]\n

    Transform Visualization of Applanix POS LVX and Hesai Pandar XT32 in RViz

    Now, we are ready to create a map for Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#3-usage","title":"3) Usage","text":"

    If you have set the configurations and created a bag file for LIO-SAM, you can launch LIO-SAM with:

    ros2 launch lio_sam run.launch.py\n

    The rviz2 screen will be open, then you can play your bag file:

    ros2 bag play <YOUR-BAG-FILE>\n

    When the mapping process is finished, you can save the map by calling this service:

    ros2 service call /lio_sam/save_map lio_sam/srv/SaveMap \"{resolution: 0.2, destination: <YOUR-MAP-DIRECTORY>}\"\n

    Here is the video for demonstration of LIO-SAM mapping in our campus environment:

    The output map format is local UTM; we will convert the local UTM map to MGRS format for tutorial_vehicle. Also, if you want to change UTM to MGRS for Autoware, please follow the convert-utm-to-mgrs-map page.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#example-result","title":"Example Result","text":"Sample Map Output for our Campus Environment"},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#paper","title":"Paper","text":"

    Thank you for citing LIO-SAM (IROS-2020) if you use any of this code.

    @inproceedings{liosam2020shan,\n  title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},\n  author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus Daniela},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={5135-5142},\n  year={2020},\n  organization={IEEE}\n}\n

    Part of the code is adapted from LeGO-LOAM.

    @inproceedings{legoloam2018shan,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/","title":"Optimized-SC-F-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#optimized-sc-f-loam","title":"Optimized-SC-F-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#what-is-optimized-sc-f-loam","title":"What is Optimized-SC-F-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/SlamCabbage/Optimized-SC-F-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#dependencies","title":"Dependencies","text":"
    sudo apt-get install ros-noetic-hector-trajectory-server\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/SlamCabbage/Optimized-SC-F-LOAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#2-create-message-file","title":"2) Create message file","text":"

    In this folder, Ground Truth information, optimized pose information, F-LOAM pose information and time information are stored

    mkdir -p ~/message/Scans\n\nChange line 383 in the laserLoopOptimizationNode.cpp to your own \"message\" folder path\n

    (Do not forget to rebuild your package)

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#3-set-parameters","title":"3) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#4-run","title":"4) Run","text":"
    source devel/setup.bash\nroslaunch optimized_sc_f_loam optimized_sc_f_loam_mapping.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#results-on-kitti-sequence-00-and-sequence-05","title":"Results on KITTI Sequence 00 and Sequence 05","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#comparison-of-trajectories-on-kitti-dataset","title":"Comparison of trajectories on KITTI dataset","text":"

    Test on KITTI sequence You can download the sequence 00 and 05 datasets from the KITTI official website and convert them into bag files using the kitti2bag open source method.

    00: 2011_10_03_drive_0027 000000 004540

    05: 2011_09_30_drive_0018 000000 002760

    See the link: https://github.com/ethz-asl/kitti_to_rosbag

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#acknowledgements","title":"Acknowledgements","text":"

    Thanks for SC-A-LOAM(Scan context: Egocentric spatial descriptor for place recognition within 3d point cloud map) and F-LOAM(F-LOAM : Fast LiDAR Odometry and Mapping).

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#citation","title":"Citation","text":"
    @misc{https://doi.org/10.48550/arxiv.2204.04932,\n  doi = {10.48550/ARXIV.2204.04932},\n\n  url = {https://arxiv.org/abs/2204.04932},\n\n  author = {Liao, Lizhou and Fu, Chunyun and Feng, Binbin and Su, Tian},\n\n  keywords = {Robotics (cs.RO), FOS: Computer and information sciences, FOS: Computer and information sciences},\n\n  title = {Optimized SC-F-LOAM: Optimized Fast LiDAR Odometry and Mapping Using Scan Context},\n\n  publisher = {arXiv},\n\n  year = {2022},\n\n  copyright = {arXiv.org perpetual, non-exclusive license}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/","title":"SC-A-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#sc-a-loam","title":"SC-A-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#what-is-sc-a-loam","title":"What is SC-A-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/gisbi-kim/SC-A-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#prerequisites-dependencies","title":"Prerequisites (dependencies)","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#1-build","title":"1) Build","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#scan-context-parameters","title":"Scan Context parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#3-run","title":"3) Run","text":"
    roslaunch aloam_velodyne aloam_mulran.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#4-saving-as-pcd-file","title":"4) Saving as PCD file","text":"
      rosrun pcl_ros pointcloud_to_pcd input:=/aft_pgo_map\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#example-results","title":"Example Results","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#riverside-01-mulran-dataset","title":"Riverside 01, MulRan dataset","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#kitti-05","title":"KITTI 05","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/","title":"SC-LeGO-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#sc-lego-loam","title":"SC-LeGO-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#what-is-sc-lego-loam","title":"What is SC-LeGO-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/irapkaist/SC-LeGO-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/irapkaist/SC-LeGO-LOAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#2-set-parameters","title":"2) Set parameters","text":"

    (Do not forget to rebuild after setting parameters.)

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#3-run","title":"3) Run","text":"
    source devel/setup.bash\nroslaunch lego_loam run.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#other-examples","title":"Other Examples","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#mulran-dataset","title":"MulRan dataset","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#cite-sc-lego-loam","title":"Cite SC-LeGO-LOAM","text":"
    @INPROCEEDINGS { gkim-2018-iros,\n  author = {Kim, Giseop and Kim, Ayoung},\n  title = { Scan Context: Egocentric Spatial Descriptor for Place Recognition within {3D} Point Cloud Map },\n  booktitle = { Proceedings of the IEEE/RSJ International Conference on Intelligent Robots and Systems },\n  year = { 2018 },\n  month = { Oct. },\n  address = { Madrid }\n}\n

    and

    @inproceedings{legoloam2018,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/","title":"Pointcloud map downsampling","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#pointcloud-map-downsampling","title":"Pointcloud map downsampling","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#overview","title":"Overview","text":"

    In some cases, for example, when your created point cloud map is either too dense or too large (i.e., exceeding 300 MB), you may want to downsample it for improved computational and memory efficiency. Also, you can consider using dynamic map loading with partial loading, please check map_loader package for more information.

    In the tutorial_vehicle implementation we will use the whole map, so we will downsample it using CloudCompare.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#installing-cloudcompare","title":"Installing CloudCompare","text":"

    You can install it by snap:

    sudo snap install cloudcompare\n

    Please check the official page for installing options.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#downsampling-a-pointcloud-map","title":"Downsampling a pointcloud map","text":"

    There are three subsampling methods on CloudCompare, we are using Space method for subsampling, but you can use other methods if you want.

    1. Please open CloudCompare and drag your pointcloud into it, then you can select your pointcloud map by just clicking on the map at the DB tree panel.
    2. Then you can click subsample button on the top panel.

    CloudCompare
    1. Please select on your subsample method, we will use space for tutorial_vehicle.
    2. Then you can select options. For example, we need to determine the minimum space between points. (Please be careful in this section; subsampling depends on your map size, computer performance, etc.) We will set this value to 0.2 for tutorial_vehicle's map.

    Pointcloud subsampling

    Select your downsampled pointcloud

    Now, you can save your downsampled pointcloud with ctrl + s or you can click the save button from the File bar. Then, this pointcloud can be used by Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/","title":"Calibrating your sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#calibrating-your-sensors","title":"Calibrating your sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#overview","title":"Overview","text":"

    Autoware expects to have multiple sensors attached to the vehicle as input to perception, localization, and planning stack. These sensors must be calibrated correctly and their positions must be defined using either urdf files (as in sample_sensor_kit) or as tf launch files.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#camera-calibration","title":"Camera calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#intrinsic-calibration","title":"Intrinsic Calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-lidar-calibration","title":"Lidar-lidar calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-lidar-calibration-tool-from-autocore","title":"Lidar-Lidar Calibration tool from Autocore","text":"

    LL-Calib on GitHub, provided by AutoCore, is a lightweight toolkit for online/offline 3D LiDAR to LiDAR calibration. It's based on local mapping and \"GICP\" method to derive the relation between main and sub lidar. Information on how to use the tool, troubleshooting tips and example rosbags can be found at the above link.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-camera-calibration","title":"Lidar-camera calibration","text":"

    Developed by MathWorks, The Lidar Camera Calibrator app enables you to interactively estimate the rigid transformation between a lidar sensor and a camera.

    https://ww2.mathworks.cn/help/lidar/ug/get-started-lidar-camera-calibrator.html

    SensorsCalibration toolbox v0.1: One more open source method for Lidar-camera calibration. This is a project for LiDAR to camera calibration, including automatic calibration and manual calibration.

    https://github.com/PJLab-ADG/SensorsCalibration/blob/master/lidar2camera/README.md

    Developed by AutoCore, an easy-to-use lightweight toolkit for Lidar-camera-calibration is proposed. Only in three steps, a fully automatic calibration will be done.

    https://github.com/autocore-ai/calibration_tools/tree/main/lidar-cam-calib-related

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-imu-calibration","title":"Lidar-IMU calibration","text":"

    Developed by APRIL Lab at Zhejiang University in China, the LI-Calib calibration tool is a toolkit for calibrating the 6DoF rigid transformation and the time offset between a 3D LiDAR and an IMU, based on continuous-time batch optimization. IMU-based cost and LiDAR point-to-surfel (surfel = surface element) distance are minimized jointly, which renders the calibration problem well-constrained in general scenarios.

    AutoCore has forked the original LI-Calib tool and overwritten the Lidar input for more general usage. Information on how to use the tool, troubleshooting tips and example rosbags can be found at the LI-Calib fork on GitHub.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/","title":"Creating vehicle and sensor description","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#creating-vehicle-and-sensor-description","title":"Creating vehicle and sensor description","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#introduction","title":"Introduction","text":"

    This page introduces the following topics.

    1. YOUR_VEHICLE_description
    2. YOUR_SENSOR_KIT_description
    3. individual_parameter
    4. YOUR_VEHICLE_launch
    5. YOUR_SENSOR_KIT_launch
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-your_vehicle_description","title":"1. YOUR_VEHICLE_description","text":"

    In YOUR_VEHICLE_description, the following configurations are set:

    1. vehicle_info.param.yaml (must be changed)
    2. mesh file (*.dae)
    3. mirror.param.yaml(must be changed)
    4. simulator_model.param.yaml
    5. vehicle.xacro
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-vehicle_infoparamyaml","title":"1. vehicle_info.param.yaml","text":"

    Defines the vehicle dimensions. For more details on each parameter, please click here.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-mesh-file","title":"2. mesh file","text":"

    A 3D model file used for visualization in rviz.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-mirrorparamyaml","title":"3. mirror.param.yaml","text":"

    Set according to the vehicle dimensions. Used in the crop-box-filter of PointCloudPreprocessor.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-simulator_modelparamyaml","title":"4. simulator_model.param.yaml","text":"

    Configuration file for the simulator environment.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-vehiclexacro","title":"5. vehicle.xacro","text":"

    The entry point file that defines the entire URDF of the vehicle. It refers to sensors.xacro, which specifies the sensor mounting positions.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-your_sensor_kit_description","title":"2. YOUR_SENSOR_KIT_description","text":"

    In sensor_kit_description, the following files are configured:

    1. sensors.xacro (must be changed)
    2. sensor_kit.xacro (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensorsxacro","title":"1. sensors.xacro","text":"

    Resolves the positions of sensors with base_link as the parent frame and defines the positions and orientations based on sensors_calibration.yaml in individual_params.

    In Autoware, <YOUR_SENSOR_KIT_description>/config/sensors_calibration.yaml is not used.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#about-sensor_kit_base_link","title":"About sensor_kit_base_link","text":"

    A sensor_kit refers to a subset that includes multiple sensors, and sensor_kit_base_link is the name of its frame. The positions and orientations within the kit are defined in sensor_kit.xacro.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-sensor_kitxacro","title":"2. sensor_kit.xacro","text":"

    Resolves the positions of sensors with sensor_kit_base_link as the parent and defines the positions and orientations based on sensor_kit_calibration.yaml in individual_params.

    In Autoware, <YOUR_SENSOR_KIT_description>/config/sensor_kit_calibration.yaml is not used.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-individual_parameter","title":"3. individual_parameter","text":"

    The individual_parameter is where parameters referenced by sensors.xacro and sensor_kit.xacro are stored. As the name implies, it is intended to manage parameters for multiple individual instances.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#introduction-to-various-parameters","title":"Introduction to Various Parameters","text":"
    1. sensors_calibration.yaml (must be changed)
    2. sensor_kit_calibration.yaml (must be changed)
    3. imu_corrector.param.yaml
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensors_calibrationyaml","title":"1. sensors_calibration.yaml","text":"

    A file that defines the mounting positions and orientations of sensors with base_link as the parent frame.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-sensor_kit_calibrationyaml","title":"2. sensor_kit_calibration.yaml","text":"

    A file that defines the mounting positions and orientations of sensors with sensor_kit_base_link as the parent frame.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-imu_correctorparamyaml","title":"3. imu_corrector.param.yaml","text":"

    A file used by imu_corrector.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-folder-structure","title":"4. Folder Structure","text":"

    Below is the default directory structure.

    individual_params/\n\u2514\u2500 config/\n     \u2514\u2500 default/\n          \u2514\u2500 sample_sensor_kit/\n               \u251c\u2500 imu_corrector.param.yaml\n               \u251c\u2500 sensor_kit_calibration.yaml\n               \u2514\u2500 sensors_calibration.yaml\n

    Copy and create a folder based on your YOUR_SENSOR_KIT name.

    individual_params/\n\u2514\u2500 config/\n     \u2514\u2500 default/\n-         \u2514\u2500 sample_sensor_kit/\n+         \u2514\u2500 <YOUR_SENSOR_KIT>/\n              \u251c\u2500 imu_corrector.param.yaml\n               \u251c\u2500 sensor_kit_calibration.yaml\n               \u2514\u2500 sensors_calibration.yaml\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#41-sample-usage","title":"4.1 Sample Usage","text":"

    Here is an example of managing parameters for multiple instances. Add a <vehicle_id> directory and switch parameters using options at startup.

    # example1 (do not set vehicle_id)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_model:=<your_vehicle_model>\n# example2 (set vehicle_id as VEHICLE_1)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_model:=<your_vehicle_model> vehicle_id:=VEHICLE_1\n# example3 (set vehicle_id as VEHICLE_2)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_model:=<your_vehicle_model> vehicle_id:=VEHICLE_2\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#sample-directory-structure","title":"Sample Directory Structure","text":"
    individual_params/\n\u2514\u2500 config/\n     \u251c\u2500 default/\n     \u2502   \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example1\n     \u2502        \u251c\u2500 imu_corrector.param.yaml\n     \u2502        \u251c\u2500 sensor_kit_calibration.yaml\n     \u2502        \u2514\u2500 sensors_calibration.yaml\n+    \u251c\u2500 VEHICLE_1/\n+    \u2502   \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example2\n+    \u2502        \u251c\u2500 imu_corrector.param.yaml\n+    \u2502        \u251c\u2500 sensor_kit_calibration.yaml\n+    \u2502        \u2514\u2500 sensors_calibration.yaml\n+    \u2514\u2500 VEHICLE_2/\n+         \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example3\n+              \u251c\u2500 imu_corrector.param.yaml\n+              \u251c\u2500 sensor_kit_calibration.yaml\n+              \u2514\u2500 sensors_calibration.yaml\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4your_vehicle_launch","title":"4.YOUR_VEHICLE_launch","text":"

    YOUR_VEHICLE_launch is where the launch file for starting the drive system devices is stored.

    1. vehicle_interface.launch.xml (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-vehicle_interfacelaunchxml","title":"1. vehicle_interface.launch.xml","text":"

    vehicle_interface.launch.xml is the launch file related to the drive system. Please modify it according to the configuration of your vehicle's drive system.

    If you are operating multiple vehicles, use the vehicle_id to switch to the corresponding configuration for each vehicle.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-your_sensor_kit_launch","title":"5. YOUR_SENSOR_KIT_launch","text":"

    YOUR_SENSOR_KIT_launch is where the launch files related to sensor startup are stored.

    1. sensing.launch.xml (must be changed)
    2. lidar.launch.xml (must be changed)
    3. camera.launch.xml
    4. imu.launch.xml (must be changed)
    5. gnss.launch.xml
    6. pointcloud_preprocessor.launch.py (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensinglaunchxml","title":"1. sensing.launch.xml","text":"

    sensing.launch.xml is the entry point that calls the launch files for all sensors. Modify it according to your sensor configuration.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-lidarlaunchxml","title":"2. lidar.launch.xml","text":"

    lidar.launch.xml is the launch file related to starting the LiDAR driver. Modify it according to your LiDAR configuration.

    In Autoware's initial configuration, it assumes converting the acquired data using pointcloud_preprocessor.launch.py.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#example-configuration-items","title":"Example Configuration Items","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-cameralaunchxml","title":"3. camera.launch.xml","text":"

    camera.launch.xml is the launch file related to starting the camera driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-imulaunchxml","title":"4. imu.launch.xml","text":"

    imu.launch.xml is the launch file related to starting the IMU driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-gnsslaunchxml","title":"5. gnss.launch.xml","text":"

    gnss.launch.xml is the launch file related to starting the GNSS driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#6-pointcloud_preprocessorlaunchpy","title":"6. pointcloud_preprocessor.launch.py","text":"

    pointcloud_preprocessor.launch.py is the launch file to convert the raw sensor data. For more information, please click here.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/","title":"Creating a vehicle interface for an Ackermann kinematic model","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#creating-a-vehicle-interface-for-an-ackermann-kinematic-model","title":"Creating a vehicle interface for an Ackermann kinematic model","text":"

    This page introduces a module vehicle interface and explains how to implement it.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#what-is-a-vehicle-interface","title":"What is a vehicle interface","text":"

    Vehicle interface is an interface that connects the control commands and your vehicle's control device. Autoware publishes control commands such as:

    Then, the vehicle interface converts these commands into actuation such as:

    So think of the vehicle interface as a module that runs the vehicle's control device to realize the input commands provided by Autoware.

    An example of inputs and outputs for vehicle interface

    This page shows you a brief explanation of how to implement your vehicle interface, but you can see further information about the vehicle interface in the "design" page.

    Note that there is no package named \"vehicle interface\" prepared in Autoware. It is a necessary package to actuate your vehicle, but you have to create one by yourself since it is very specific to your vehicle's control device.

    For example, if you are using a by-wire kit PACMod, a vehicle interface named pacmod_interface published by TIER IV, Inc. is available. However, if you have constructed something original and haven't found an open source vehicle interface applicable, you have to implement your own vehicle interface from scratch.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#how-to-implement-a-vehicle-interface","title":"How to implement a vehicle interface","text":"

    The following instructions describe how to create a vehicle interface.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#1-create-a-directory-for-vehicle-interface","title":"1. Create a directory for vehicle interface","text":"

    It is recommended to create your vehicle interface at <your-autoware-dir>/src/vehicle/external

    cd <your-autoware-dir>/src/vehicle/external\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#2-install-or-implement-your-own-vehicle-interface","title":"2. Install or implement your own vehicle interface","text":"

    If there is an already complete vehicle interface package (like pacmod_interface), you can install it to your environment. If not, you have to implement your own vehicle interface by yourself. Let's create a new package by ros2 pkg create. The following example will show you how to create a vehicle interface package named my_vehicle_interface.

    ros2 pkg create --build-type ament_cmake my_vehicle_interface\n

    Then, you should write your implementation of vehicle interface in my_vehicle_interface/src. Again, since this implementation is so specific to the control device of your vehicle, it is beyond the scope of this document to describe how to implement your vehicle interface in detail. Here are some factors that might be considered.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#3-prepare-a-launch-file","title":"3. Prepare a launch file","text":"

    After you implement your vehicle interface or you want to debug it by launching it, create a launch file of your vehicle interface, and include it to vehicle_interface.launch.xml.

    Do not get confused. First, you need to create a launch file for your own vehicle interface module (like my_vehicle_interface.launch.xml) and then include that to vehicle_interface.launch.xml which exists in another directory. Here are the details.

    1. Add a launch directory in the my_vehicle_interface directory, and create a launch file of your own vehicle interface in it. Take a look at Creating a launch file in the ROS 2 documentation.

    2. Next, go to <your-autoware-dir>/src/vehicle, copy the directory /sample_vehicle_launch/, and paste it to the same place (which means it should be lined up with external and sample_vehicle_launch).

    3. You have to rename each \"sample_vehicle\" to something else. For example, if you want to rename \"sample_vehicle\" to \"my_vehicle_name\", you need to change the following. Note that the \"_launch\" and \"_description\" suffixes must be kept as they are.

      • Rename the directories
        • sample_vehicle_launch \u2192 my_vehicle_name_launch
        • my_vehicle_name_launch/sample_vehicle_launch \u2192 my_vehicle_name_launch/my_vehicle_name_launch
        • my_vehicle_name_launch/sample_vehicle_description \u2192 my_vehicle_name_launch/my_vehicle_name_description
      • After you rename your directories, rename each \"sample_vehicle\" to \"my_vehicle_name\" in the source code.
        • my_vehicle_name_description/CMakeLists.txt
        • my_vehicle_name_description/package.xml
        • my_vehicle_name_description/urdf/vehicle.xacro (there are two parts)
        • my_vehicle_name_launch/CMakeLists.txt
        • my_vehicle_name_launch/package.xml
        • README.md
    4. Include your launch file to my_vehicle_name_launch/my_vehicle_name_launch/launch/vehicle_interface.launch.xml by opening it and add the include terms like below.

    vehicle_interface.launch.xml
    <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<launch>\n<arg name=\"vehicle_id\" default=\"$(env VEHICLE_ID default)\"/>\n\n<include file=\"$(find-pkg-share my_vehicle_interface)/launch/my_vehicle_interface.launch.xml\">\n</include>\n</launch>\n

    Finally, your directory structure may look like the following. Most of the files are omitted for clarity, but the files shown here need modification as described in the previous and current steps.

    <your-autoware-dir>/\n\u2514\u2500 src/\n    \u2514\u2500 vehicle/\n        \u251c\u2500 external/\n+       \u2502   \u2514\u2500 my_vehicle_interface/\n+       \u2502       \u251c\u2500 src/\n+       \u2502       \u2514\u2500 launch/\n+       \u2502            \u2514\u2500 my_vehicle_interface.launch.xml\n       \u251c\u2500 sample_vehicle_launch/\n+       \u2514\u2500 my_vehicle_name_launch/ (COPIED FROM sample_vehicle_launch)\n+           \u251c\u2500 my_vehicle_name_launch/\n+           \u2502  \u251c\u2500 launch/\n+           \u2502  \u2502  \u2514\u2500 vehicle_interface.launch.xml\n+           \u2502  \u251c\u2500 CMakeLists.txt\n+           \u2502  \u2514\u2500 package.xml\n+           \u251c\u2500 my_vehicle_name_description/\n+           \u2502  \u251c\u2500 config/\n+           \u2502  \u251c\u2500 mesh/\n+           \u2502  \u251c\u2500 urdf/\n+           \u2502  \u2502  \u2514\u2500 vehicle.xacro\n+           \u2502  \u251c\u2500 CMakeLists.txt\n+           \u2502  \u2514\u2500 package.xml\n+           \u2514\u2500 README.md\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#4-build-the-vehicle-interface-package-and-the-launch-package","title":"4. Build the vehicle interface package and the launch package","text":"

    Build three packages my_vehicle_interface, my_vehicle_name_launch and my_vehicle_name_description by colcon build, or you can just build the entire Autoware if you have done other things.

    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release --packages-select my_vehicle_interface my_vehicle_name_launch my_vehicle_name_description\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#5-when-you-launch-autoware","title":"5. When you launch Autoware","text":"

    Finally, you are done implementing your vehicle interface module! Be careful that you need to launch Autoware with the proper vehicle_model option like the example below. This example launches the planning simulator.

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-planning vehicle_model:=my_vehicle_name sensor_model:=sample_sensor_kit\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#tips","title":"Tips","text":"

    There are some tips that may help you.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#ackermann-kinematic-model","title":"Ackermann kinematic model","text":"

    Autoware now supports control inputs for vehicles based on an Ackermann kinematic model. This section gives a brief overview of the Ackermann kinematic model and explains how Autoware controls it.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#geometry","title":"Geometry","text":"

    The basic style of the Ackermann kinematic model has four wheels with an Ackermann link on the front, and it is powered by the rear wheels. The key point of the Ackermann kinematic model is that the axes of all wheels intersect at the same point, which means all wheels trace circular trajectories with different radii but a common center point (see the figure below). Therefore, this model has the great advantage that it minimizes wheel slippage and prevents the tires from wearing out quickly.

    In general, the Ackermann kinematic model accepts the longitudinal speed \\(v\\) and the steering angle \\(\\phi\\) as inputs. In Autoware, \\(\\phi\\) is positive when steering counterclockwise, so the steering angle in the figure below is actually negative.

    The basic style of an Ackermann kinematic model. The left figure shows a vehicle facing straight forward, while the right figure shows a vehicle steering to the right."},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#control","title":"Control","text":"

    Autoware publishes a ROS 2 topic named control_cmd from several types of publishers. A control_cmd topic is an AckermannControlCommand type message that contains

    AckermannControlCommand
      builtin_interfaces/Time stamp\n  autoware_auto_control_msgs/AckermannLateralCommand lateral\n  autoware_auto_control_msgs/LongitudinalCommand longitudinal\n

    where,

    AckermannLateralCommand
      builtin_interfaces/Time stamp\n  float32 steering_tire_angle\n  float32 steering_tire_rotation_rate\n
    LongitudinalCommand
      builtin_interfaces/Time stamp\n  float32 speed\n  float32 acceleration\n  float32 jerk\n

    See the AckermannLateralCommand.idl and LongitudinalCommand.idl for details.

    The vehicle interface should realize these control commands through your vehicle's control device.

    Moreover, Autoware also provides brake commands, light commands, and more (see vehicle interface design), so the vehicle interface module should be applicable to these commands as long as there are devices available to handle them.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/","title":"Customizing for differential drive vehicle","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#customizing-for-differential-drive-vehicle","title":"Customizing for differential drive vehicle","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#1-introduction","title":"1. Introduction","text":"

    Currently, Autoware assumes that vehicles use an Ackermann kinematic model with Ackermann steering. Thus, Autoware adopts the Ackermann command format for the Control module's output (see the AckermannDrive ROS message definition for an overview of Ackermann commands, and the AckermannControlCommands struct used in Autoware for more details).

    However, it is possible to integrate Autoware with a vehicle that follows a differential drive kinematic model, as commonly used by small mobile robots.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#2-procedure","title":"2. Procedure","text":"

    One simple way of using Autoware with a differential drive vehicle is to create a vehicle_interface package that translates Ackermann commands to differential drive commands. Here are two points that you need to consider:

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#21-create-a-vehicle_interface-package-for-differential-drive-vehicle","title":"2.1 Create a vehicle_interface package for differential drive vehicle","text":"

    An Ackermann command in Autoware consists of two main control inputs:

    Conversely, a typical differential drive command consists of the following inputs:

    So, one way in which an Ackermann command can be converted to a differential drive command is by using the following equations:

    \\[ v_l = v - \\frac{l\\omega}{2}, v_r = v + \\frac{l\\omega}{2} \\]

    where \\(l\\) denotes wheel tread.

    For information about other factors that need to be considered when creating a vehicle_interface package, refer to the vehicle_interface component page.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#22-set-an-appropriate-wheel_base","title":"2.2 Set an appropriate wheel_base","text":"

    A differential drive robot does not necessarily have front and rear wheels, which means that the wheelbase (the horizontal distance between the axles of the front and rear wheels) cannot be defined. However, Autoware expects wheel_base to be set in vehicle_info.param.yaml with some value. Thus, you need to set a pseudo value for wheel_base.

    The appropriate pseudo value for wheel_base depends on the size of your vehicle. Setting it to be the same value as wheel_tread is one possible choice.

    Warning

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#3-known-issues","title":"3. Known issues","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#motion-model-incompatibility","title":"Motion model incompatibility","text":"

    Since Autoware assumes that vehicles use a steering system, it is not possible to take advantage of the flexibility of a differential drive system's motion model.

    For example, when planning a parking maneuver with the freespace_planner module, Autoware may drive the differential drive vehicle forward and backward, even if the vehicle can be parked with a simpler trajectory that uses pure rotational movement.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/","title":"Creating Autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#creating-autoware-meta-repository","title":"Creating Autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#what-is-meta-repository","title":"What is Meta-repository?","text":"

    A meta-repository is a repository that manages multiple repositories, and Autoware is one of them. It serves as a centralized control point for referencing, configuring, and versioning other repositories.

    By using Ansible and VCS, you can automatically set up your Autoware. autoware.repos file manages the configuration of multiple repositories.

    Note: VCS stands for Version Control System, such as Git or Subversion.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#how-to-create-and-customize-your-autoware-meta-repository","title":"How to create and customize your autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#1-create-autoware-repository","title":"1. Create autoware repository","text":"

    If you want to integrate Autoware into your vehicle, the first step is to create an Autoware meta-repository.

    One easy way is to fork autowarefoundation/autoware and clone it. For how to fork a repository, refer to GitHub Docs.

    git clone https://github.com/YOUR_NAME/autoware.git\n

    If you set up multiple types of vehicles, adding a suffix like autoware.vehicle_A or autoware.vehicle_B is recommended.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#2-customize-your-autowarerepos-for-your-environment","title":"2. Customize your autoware.repos for your environment","text":"

    You need to customize autoware.repos for your own vehicle's Autoware.

    For example, if you want to customize the parameters in your individual_params or autoware_launch package to fit your vehicle, you can modify the configuration of each package and use them accordingly.

    Please edit the parameters in Autoware's autoware_individual_params and autoware_launch packages to match your vehicle's specific requirements, as these packages provide sample parameters and may not be tailored to your vehicle by default.

    If you want to fork autoware_individual_params and make modifications, it would be as follows:

    Example: If you fork individual_params and rename autoware_individual_params.vehicle_A:

    - param/autoware_individual_params:\n-   type: git\n-   url: https://github.com/autowarefoundation/autoware_individual_params\n-   version: main\n+ param/autoware_individual_params.vehicle_A:\n+   type: git\n+   url: https://github.com/YOUR_NAME/autoware_individual_params.vehicle_A\n+   version: main\n

    Please refer to the following documentation link for instructions on how to create and customize each vehicle_interface:

    Please remember to add all your custom packages, such as interfaces and descriptions, to your autoware.repos to ensure that your packages are properly included and managed within the Autoware repository.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/","title":"Launch Autoware","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/#launch-autoware","title":"Launch Autoware","text":"

    Warning

    Under Construction

    This section explains how to run your vehicle with Autoware.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#install-autoware","title":"Install Autoware","text":"

    Follow the installation steps of Autoware.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#launch-autoware_1","title":"Launch Autoware","text":"

    Launch Autoware with the following command:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n

    It is possible to specify which components to launch using command-line arguments. For example, if you don't need to launch perception, planning, and control for localization debug, you can launch the following:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP \\\nlaunch_perception:=false \\\nlaunch_planning:=false \\\nlaunch_control:=false\n

    The basic command-line options are documented in autoware.launch.xml.

    There are options available to switch between different methods for some component. For example, by specifying pose_source/twist_source or perception_mode, you can switch localization and perception methods, respectively. These options allow you to choose the desired algorithms or sensor configurations for the respective functionalities.

    For options on eagleye component, please refer to the sub-pages.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#set-initial-pose","title":"Set initial pose","text":"

    If GNSS is available, Autoware automatically initializes the vehicle's pose.

    If not or if the automatic initialization returns an incorrect position, you need to set the initial pose using the RViz GUI.

    1. Click the 2D Pose estimate button in the toolbar, or hit the P key

    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the initial pose.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#set-goal-pose","title":"Set goal pose","text":"

    Set a goal pose for the ego vehicle.

    1. Click the 2D Nav Goal button in the toolbar, or hit the G key

    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the goal pose. If successful, you will see the calculated planning path on RViz.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#engage","title":"Engage","text":"

    In your terminal, execute the following command.

    source ~/autoware.YOURS/install/setup.bash\nros2 topic pub /autoware/engage autoware_auto_vehicle_msgs/msg/Engage \"engage: true\" -1\n

    You can also engage via RViz with \"AutowareStatePanel\". The panel can be found in Panels > Add New Panel > tier4_state_rviz_plugin > AutowareStatePanel.

    Once the route is computed, the \"AUTO\" button becomes active. Pressing the AUTO button engages the autonomous driving mode.

    Now the vehicle should drive along the calculated path!

    During the autonomous driving, the StatePanel appears as shown in the image below. Pressing the \"STOP\" button allows you to stop the vehicle.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/","title":"Perception mode","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#perception-mode","title":"Perception mode","text":"

    Warning

    Under Construction

    By specifying the perception_mode, users can switch between different sensor configurations for perception. This allows you to choose the specific sensor setup that you want to use for the perception tasks.

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP \\\nperception_mode:=lidar\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#lidar","title":"LiDAR","text":"

    perception_mode:=lidar

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#radar","title":"Radar","text":"

    perception_mode:=radar

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#camera-lidar-fusion","title":"Camera LiDAR fusion","text":"

    perception_mode:=camera_lidar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#camera-lidar-radar-fusion","title":"Camera LiDAR Radar fusion","text":"

    perception_mode:=camera_lidar_radar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#lidar-radar-fusion","title":"LiDAR Radar fusion","text":"

    perception_mode:=lidar_radar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/","title":"Localization methods","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#localization-methods","title":"Localization methods","text":"

    Current localization launcher implemented by TIER IV supports multiple localization methods, both pose estimators and twist estimators. tier4_localization_component.launch.xml has two arguments to select which estimators to launch:

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#ndt-scan-matcher-a-lidar-and-pointcloud-map-based-pose-estimator-default","title":"NDT scan matcher: a LiDAR and pointcloud map based pose estimator (default)","text":"

    By default, Autoware launches ndt_scan_matcher for pose estimator. In order to launch this explicitly, you need to specify as follows:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=ndt ...\n

    Note that currently pose_source is set to NDT as default, so you can skip this argument.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#gyro-odometer-an-imu-wheel-odometry-based-twist-estimator-default","title":"Gyro Odometer: an IMU & wheel odometry based twist estimator (default)","text":"

    By default, Autoware launches gyro_odometer for twist estimator. In order to launch this explicitly, you need to specify as follows:

    ros2 launch autoware_launch autoware.launch.xml ... twist_source:=gyro_odom ...\n

    Note that currently twist_source is set to Gyro Odometer as default, so you can skip this argument.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#yabloc-a-camera-and-vector-map-based-pose-estimator","title":"YabLoc: a camera and vector map based pose estimator","text":"

    You can use YabLoc as a camera-based localization method. For more details on YabLoc, please refer to the README of YabLoc in autoware.universe.

    To use YabLoc as a pose_estimator, add pose_source:=yabloc when launching Autoware. By default, the pose_source is set to ndt. By specifying this command-line argument, YabLoc nodes will be automatically launched while the NDT nodes will not be started.

    Here is an example of a launch command:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=yabloc ...\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#eagleye-a-gnss-imu-wheel-odometry-based-pose-and-twist-estimator","title":"Eagleye: a GNSS & IMU & wheel odometry based pose and twist estimator","text":"

    You can use Eagleye as a GNSS & IMU & wheel odometry-based localization method. For more details on Eagleye, please refer to the Eagleye.

    Eagleye has a function for position estimation and twist estimation, namely pose_estimator and twist_estimator, respectively. When running Eagleye in twist_estimator mode with other pose_estimator such as ndt_scan_matcher, Eagleye is still helpful since it can improve scan matching by providing accurate twists using GNSS doppler.

    You can use Eagleye by specifying the pose_source and twist_source accordingly through command-line arguments.

    Example of using Eagleye as the pose twist estimator:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=eagleye twist_source:=eagleye ...\n

    Example of using Eagleye as the twist estimator:

    ros2 launch autoware_launch autoware.launch.xml ... twist_source:=eagleye ...\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/","title":"Eagleye","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#using-eagleye-with-autoware","title":"Using Eagleye with Autoware","text":"

    This page will show you how to set up Eagleye in order to use it with Autoware. For the details of the integration proposal, please refer to this discussion.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#what-is-eagleye","title":"What is Eagleye?","text":"

    Eagleye is an open-source GNSS/IMU-based localizer initially developed by MAP IV, Inc. It provides a cost-effective alternative to LiDAR and point cloud-based localization by using low-cost GNSS and IMU sensors to provide vehicle position, orientation, and altitude information.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#dependencies","title":"Dependencies","text":"

    The below packages are automatically installed during the setup of Autoware as they are listed in autoware.repos.

    1. Eagleye (autoware-main branch)
    2. RTKLIB ROS Bridge (ros2-v0.1.0 branch)
    3. LLH Converter (ros2 branch)
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#architecture","title":"Architecture","text":"

    Eagleye can be utilized in the Autoware localization stack in two ways:

    1. Feed only twist into the EKF localizer.

    2. Feed both twist and pose from Eagleye into the EKF localizer (twist can also be used with regular gyro_odometry).

    Note: RTK positioning is required when using Eagleye as the pose estimator. On the other hand, it is not mandatory when using it as the twist estimator.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#requirements","title":"Requirements","text":"

    Eagleye requires GNSS, IMU and vehicle speed as inputs.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#imu-topic","title":"IMU topic","text":"

    sensor_msgs/msg/Imu is supported for Eagleye IMU input.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#vehicle-speed-topic","title":"Vehicle speed topic","text":"

    geometry_msgs/msg/TwistStamped and geometry_msgs/msg/TwistWithCovarianceStamped are supported for the input vehicle speed.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#gnss-topic","title":"GNSS topic","text":"

    Eagleye requires latitude/longitude height and doppler velocity generated by the GNSS receiver. Your GNSS ROS driver must publish the following messages:

    GNSS ROS drivers modification ublox_gps No additional settings are required. It publishes sensor_msgs/msg/NavSatFix and geometry_msgs/msg/TwistWithCovarianceStamped required by Eagleye with default settings. septentrio_gnss_driver Set publish.navsatfix and publish.twist in the config file gnss.yaml to true"},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#parameter-modifications-for-integration-into-your-vehicle","title":"Parameter Modifications for Integration into Your Vehicle","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#topic-name-topic-type","title":"topic name & topic type","text":"

    The users must correctly specify input topics for GNSS latitude, longitude, and height, GNSS doppler speed, IMU, and vehicle speed in the eagleye_config.yaml.

    # Topic\ntwist:\ntwist_type: 1 # TwistStamped : 0, TwistWithCovarianceStamped: 1\ntwist_topic: /sensing/vehicle_velocity_converter/twist_with_covariance\nimu_topic: /sensing/imu/tamagawa/imu_raw\ngnss:\nvelocity_source_type: 2 # rtklib_msgs/RtklibNav: 0, nmea_msgs/Sentence: 1, ublox_msgs/NavPVT: 2, geometry_msgs/TwistWithCovarianceStamped: 3\nvelocity_source_topic: /sensing/gnss/ublox/navpvt\nllh_source_type: 2 # rtklib_msgs/RtklibNav: 0, nmea_msgs/Sentence: 1, sensor_msgs/NavSatFix: 2\nllh_source_topic: /sensing/gnss/ublox/nav_sat_fix\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#sensor-frequency","title":"sensor frequency","text":"

    Also, the frequency of GNSS and IMU must be set in eagleye_config.yaml

    common:\nimu_rate: 50\ngnss_rate: 5\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#conversion-from-fix-to-pose","title":"Conversion from fix to pose","text":"

    The parameters for converting sensor_msgs/msg/NavSatFix to geometry_msgs/msg/PoseWithCovarianceStamped is listed in fix2pose.yaml. If you use a different geoid or projection type, change these parameters.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#other-parameters","title":"Other parameters","text":"

    The other parameters are described here. Basically, these do not need to be changed.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#notes-on-initialization","title":"Notes on initialization","text":"

    Eagleye requires an initialization process for proper operation. Without initialization, the output for twist will be in the raw value, and the pose data will not be available.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#1-static-initialization","title":"1. Static Initialization","text":"

    The first step is static initialization, which involves allowing the Eagleye to remain stationary for approximately 5 seconds after startup to estimate the yaw-rate offset.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#2-dynamic-initialization","title":"2. Dynamic initialization","text":"

    The next step is dynamic initialization, which involves running the Eagleye in a straight line for approximately 30 seconds. This process estimates the scale factor of wheel speed and azimuth angle.

    Once dynamic initialization is complete, the Eagleye will be able to provide corrected twist and pose data.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#how-to-check-the-progress-of-initialization","title":"How to check the progress of initialization","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#note-on-georeferenced-maps","title":"Note on georeferenced maps","text":"

    Note that the output position might not appear to be in the point cloud maps if you are using maps that are not properly georeferenced. In the case of a single GNSS antenna, initial position estimation (dynamic initialization) can take several seconds to complete after starting to run in an environment where GNSS positioning is available.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/","title":"Evaluating the controller performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#evaluating-the-controller-performance","title":"Evaluating the controller performance","text":"

    This page shows how to use control_performance_analysis package to evaluate the controllers.

    control_performance_analysis is the package to analyze the tracking performance of a control module and monitor the driving status of the vehicle.

    If you need more detailed information about the package, refer to the control_performance_analysis.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#how-to-use","title":"How to use","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#before-driving","title":"Before Driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#1-firstly-you-need-to-launch-autoware-you-can-also-use-this-tool-with-real-vehicle-driving","title":"1. Firstly you need to launch Autoware. You can also use this tool with real vehicle driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#2-initialize-the-vehicle-and-send-goal-position-to-create-route","title":"2. Initialize the vehicle and send goal position to create route","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#3-launch-the-control_performance_analysis-package","title":"3. Launch the control_performance_analysis package","text":"
    ros2 launch control_performance_analysis controller_performance_analysis.launch.xml\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#4-run-the-plotjuggler-in-sourced-terminal","title":"4. Run the PlotJuggler in sourced terminal","text":"
    source ~/autoware/install/setup.bash\n
    ros2 run plotjuggler plotjuggler\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#5-increase-the-buffer-size-maximum-is-100-and-import-the-layout-from-autowareuniversecontrolcontrol_performance_analysisconfigcontroller_monitorxml","title":"5. Increase the buffer size (maximum is 100), and import the layout from /autoware.universe/control/control_performance_analysis/config/controller_monitor.xml","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#6-now-you-can-start-to-driving-you-should-see-all-the-performance-and-driving-variables-in-plotjuggler","title":"6. Now, you can start to driving. You should see all the performance and driving variables in PlotJuggler","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#after-driving","title":"After Driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#1-you-can-export-the-statistical-output-and-all-data-to-compare-and-later-usage","title":"1. You can export the statistical output and all data to compare and later usage","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#tips","title":"Tips","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/","title":"Evaluating real-time performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#evaluating-real-time-performance","title":"Evaluating real-time performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#introduction","title":"Introduction","text":"

    Autoware should be a real-time system when integrated into a service. Therefore, the response time of each callback should be as small as possible. If Autoware appears to be slow, it is imperative to conduct performance measurements and implement improvements based on the analysis. However, Autoware is a complex software system comprising numerous ROS 2 nodes, potentially complicating the process of identifying bottlenecks. To address this challenge, we will discuss methods for conducting detailed performance measurements for Autoware and provide case studies. It is worth noting that multiple factors can contribute to poor performance, such as scheduling and memory allocation in the OS layer, but our focus in this page will be on user code bottlenecks. The outline of this section is as follows:

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#performance-measurement","title":"Performance measurement","text":"

    Improvement is impossible without precise measurements. To measure the performance of the application code, it is essential to eliminate any external influences. Such influences include interference from the operating system and CPU frequency fluctuations. Scheduling effects also occur when core resources are shared by multiple threads. This section outlines a technique for accurately measuring the performance of the application code for a specific node. Though this section only discusses the case of Linux on Intel CPUs, similar considerations should be made in other environments.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#single-node-execution","title":"Single node execution","text":"

    To eliminate the influence of scheduling, the node being measured should operate independently, using the same logic as when the entire Autoware system is running. To accomplish this, record all input topics of the node to be measured while the whole Autoware system is running. To achieve this objective, a tool called ros2_single_node_replayer has been prepared.

    Details on how to use the tool can be found in the README. This tool records the input topics of a specific node during the entire Autoware operation and replays it in a single node with the same logic. The tool relies on the ros2 bag record command, and the recording of service/action is not supported as of ROS 2 Humble, so nodes that use service/action as their main logic may not work well.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#prepare-separated-cores","title":"Prepare separated cores","text":"

    Isolated cores running the node to be measured must meet the following conditions.

    To fulfill these conditions on Linux, a custom kernel build with the following kernel configurations is required. You can find many resources to instruct you on how to build a custom Linux kernel (like this one). Note that even if full tickless is enabled, timer interrupts are generated for scheduling if more than two tasks exist in one core.

    # Enable CONFIG_NO_HZ_FULL\n-> General setup\n-> Timers subsystem\n-> Timer tick handling (Full dynticks system (tickless))\n(X) Full dynticks system (tickless)\n\n# Allows RCU callback processing to be offloaded from selected CPUs\n# (CONFIG_RCU_NOCB_CPU=y)\n-> General setup\n-> RCU Subsystem\n-*- Offload RCU callback processing from boot-selected CPUs\n

    Additionally, the kernel boot parameters need to be set as follows.

    GRUB_CMDLINE_LINUX_DEFAULT=\n  \"... isolcpus=2,8 rcu_nocbs=2,8 rcu_nocb_poll nohz_full=2,8 intel_pstate=disable\"\n

    In the above configuration, for example, the node to be measured is assumed to run on core 2, and core 8, which is a hyper-threading pair, is also being isolated. Appropriate decisions on which cores to run the measurement target and which nodes to isolate need to be made based on the cache and core layout of the measurement machine. You can easily check if it is properly configured by running cat /proc/softirqs. Since intel_pstate=disable is specified in the kernel boot parameter, userspace can be specified in the scaling governor.

    cat /sys/devices/system/cpu/cpu2/cpufreq/scaling_governor // ondemand\nsudo sh -c \"echo userspace > /sys/devices/system/cpu/cpu2/cpufreq/scaling_governor\"\n

    This allows you to freely set the desired frequency within a defined range.

    sudo sh -c \"echo <freq(khz)> > /sys/devices/system/cpu/cpu2/cpufreq/scaling_setspeed\"\n

    Turbo Boost needs to be switched off on Intel CPUs, which is often overlooked.

    sudo sh -c \"echo 0 > /sys/devices/system/cpu/cpufreq/boost\"\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#run-single-node-separately","title":"Run single node separately","text":"

    Following the instructions in the ros2_single_node_replayer README, start the node and play the dedicated rosbag created by the tool. Before playing the rosbag, appropriately set the CPU affinity of the thread on which the node runs, so it is placed on the isolated core prepared.

    taskset --cpu-list -p <target cpu> <pid>\n

    To avoid interference in the last level cache, minimize the number of other applications running during the measurement.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#measurement-and-visualization","title":"Measurement and visualization","text":"

    To visualize the performance of the measurement target, embed code for logging timestamps and performance counter values in the target source code. To achieve this objective, a tool called pmu_analyzer has been prepared.

    Details on how to use the tool can be found in the README. This tool can measure the turnaround time of any section in the source code, as well as various performance counters.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#case-studies","title":"Case studies","text":"

    In this section, we will present several case studies that demonstrate the performance improvements. These examples not only showcase our commitment to enhancing the system's efficiency but also serve as a valuable resource for developers who may face similar challenges in their own projects. The performance improvements discussed here span various components of the Autoware system, including sensing modules and planning modules. There are tendencies for each component regarding which points are becoming bottlenecks. By examining the methods, techniques, and tools employed in these case studies, readers can gain a better understanding of the practical aspects of optimizing complex software systems like Autoware.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#sensing-component","title":"Sensing component","text":"

    First, we will explain the procedure for performance improvement, taking the node ring_outlier_filter as an example. Refer to the Pull Request for details.

    The following figure is a time-series plot of the turnaround time of the main processing part of ring_outlier_filter, analyzed as described in the \"Performance Measurement\" section above.

    The horizontal axis indicates the number of callbacks called (i.e., callback index), and the vertical axis indicates the turnaround time.

    When analyzing the performance of the sensing module from the viewpoint of performance counter, pay attention to instructions, LLC-load-misses, LLC-store-misses, cache-misses, and minor-faults.

    Analysis of the performance counter shows that the largest fluctuations come from minor-faults (i.e., soft page faults), the second largest from LLC-store-misses and LLC-load-misses (i.e., cache misses in the last level cache), and the slowest fluctuations come from instructions (i.e., message data size fluctuations). For example, when we plot minor-faults on the horizontal axis and turnaround time on the vertical axis, we can see the following dominant proportional relationship.

    To achieve zero soft page faults, heap allocations must only be made from areas that have been first touched in advance. We have developed a library called heaphook to avoid soft page faults while running Autoware callback. If you are interested, refer to the GitHub discussion and the issue.

    To reduce LLC misses, it is necessary to reduce the working set and to use cache-efficient access patterns.

    In the sensing component, which handles large message data such as LiDAR point cloud data, minimizing copying is important. A callback that takes sensor data message types as input and output should be written in an in-place algorithm as much as possible. This means that in the following pseudocode, when generating output_msg from input_msg, it is crucial to avoid using buffers as much as possible to reduce the number of memory copies.

    void callback(const PointCloudMsg &input_msg) {\nauto output_msg = allocate_msg<PointCloudMsg>(output_size);\nfill(input_msg, output_msg);\npublish(std::move(output_msg));\n}\n

    To improve cache efficiency, implement an in-place style as much as possible, instead of touching memory areas sporadically. In ROS applications using PCL, the code shown below is often seen.

    void callback(const sensor_msgs::PointCloud2ConstPtr &input_msg) {\npcl::PointCloud<PointT>::Ptr input_pcl(new pcl::PointCloud<PointT>);\npcl::fromROSMsg(*input_msg, *input_pcl);\n\n// Algorithm is described for point cloud type of pcl\npcl::PointCloud<PointT>::Ptr output_pcl(new pcl::PointCloud<PointT>);\nfill_pcl(*input_pcl, *output_pcl);\n\nauto output_msg = allocate_msg<sensor_msgs::PointCloud2>(output_size);\npcl::toROSMsg(*output_pcl, *output_msg);\npublish(std::move(output_msg));\n}\n

    To use the PCL library, fromROSMsg() and toROSMsg() are used to perform message type conversion at the beginning and end of the callback. This is a wasteful copying process and should be avoided. We should eliminate unnecessary type conversions by removing dependencies on PCL (e.g., https://github.com/tier4/velodyne_vls/pull/39). For large message types such as map data, there should be only one instance in the entire system in terms of physical memory.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#planning-component","title":"Planning component","text":"

    First, we will pick up the detection_area module in the behavior_velocity_planner node, which tends to have a long turnaround time. We have followed the performance analysis steps above to obtain the following graph. Axes are the same as the graphs in the sensing case study.

    Using pmu_analyzer tool to further identify the bottleneck, we have found that the following multiple loops were taking up a lot of processing time:

    for ( area : detection_areas )\nfor ( point : point_clouds )\nif ( boost::geometry::within(point, area) )\n// do something with O(1)\n

    It checks whether each point cloud is contained in each detection area. Let N be the size of point_clouds and M be the size of detection_areas, then the computational complexity of this program is O(N^2 * M), since the complexity of within is O(N). Here, given that most of the point clouds are located far away from a certain detection area, a certain optimization can be achieved. First, calculate the minimum enclosing circle that completely covers the detection area, and then check whether the points are contained in that circle. Most of the point clouds can be quickly ruled out by this method, we don\u2019t have to call the within function in most cases. Below is the pseudocode after optimization.

    for ( area : detection_areas )\ncircle = calc_minimum_enclosing_circle(area)\nfor ( point : point_clouds )\nif ( point is in circle )\nif ( boost::geometry::within(point, area) )\n// do something with O(1)\n

    By using O(N) algorithm for minimum enclosing circle, the computational complexity of this program is reduced to almost O(N * (N + M)) (note that the exact computational complexity does not really change). If you are interested, refer to the Pull Request.

    Similar to this example, in the planning component, we take into consideration thousands to tens of thousands of point clouds, thousands of points in a path representing our own route, and polygons representing obstacles and detection areas in the surroundings, and we repeatedly create paths based on them. Therefore, we access the contents of the point clouds and paths multiple times using for-loops. In most cases, the bottleneck lies in these naive for-loops. Here, understanding Big O notation and reducing the order of computational complexity directly leads to performance improvements.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/","title":"Add a custom ROS message","text":""},{"location":"how-to-guides/others/add-a-custom-ros-message/#add-a-custom-ros-message","title":"Add a custom ROS message","text":""},{"location":"how-to-guides/others/add-a-custom-ros-message/#overview","title":"Overview","text":"

    During the Autoware development, you will probably need to define your own messages. Read the following instructions before adding a custom message.

    1. Messages in autoware_msgs define interfaces of Autoware Core.

      • If a contributor wishes to make changes or add new messages to autoware_msgs, they should first create a new discussion post under the Design category.
    2. Any other minor or proposal messages used for internal communication within a component (such as planning) should be defined in another repository.

      • tier4_autoware_msgs is an example of that.

    The following is a simple tutorial of adding a message package to autoware_msgs. For the general ROS 2 tutorial, see Create custom msg and srv files.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/#how-to-create-custom-message","title":"How to create custom message","text":"

    Make sure you are in the Autoware workspace, and then run the following command to create a new package. As an example, let's create a package to define sensor messages.

    1. Create a package

      cd ./src/core/autoware_msgs\nros2 pkg create --build-type ament_cmake autoware_sensing_msgs\n
    2. Create custom messages

      You should create .msg files and place them in the msg directory.

      NOTE: The initial letters of the .msg and .srv files must be capitalized.

      As an example, let's make .msg files GnssInsOrientation.msg and GnssInsOrientationStamped.msg to define GNSS/INS orientation messages:

      mkdir msg\ncd msg\ntouch GnssInsOrientation.msg\ntouch GnssInsOrientationStamped.msg\n

      Edit GnssInsOrientation.msg with your editor to be the following content:

      geometry_msgs/Quaternion orientation\nfloat32 rmse_rotation_x\nfloat32 rmse_rotation_y\nfloat32 rmse_rotation_z\n

      In this case, the custom message uses a message from another message package geometry_msgs/Quaternion.

      Edit GnssInsOrientationStamped.msg with your editor to be the following content:

      std_msgs/Header header\nGnssInsOrientation orientation\n

      In this case, the custom message uses a message from another message package std_msgs/Header.

    3. Edit CMakeLists.txt

      In order to use this custom message in C++ or Python languages, we need to add the following lines to CMakeLists.txt:

      rosidl_generate_interfaces(${PROJECT_NAME}\n\"msg/GnssInsOrientation.msg\"\n\"msg/GnssInsOrientationStamped.msg\"\nDEPENDENCIES\ngeometry_msgs\nstd_msgs\nADD_LINTER_TESTS\n)\n

      The ament_cmake_auto tool is very useful and is more widely used in Autoware, so we recommend using ament_cmake_auto instead of ament_cmake.

      We need to replace

      find_package(ament_cmake REQUIRED)\n\nament_package()\n

      with

      find_package(ament_cmake_auto REQUIRED)\n\nament_auto_package()\n
    4. Edit package.xml

      We need to declare relevant dependencies in package.xml. For the above example we need to add the following content:

      <buildtool_depend>rosidl_default_generators</buildtool_depend>\n\n<exec_depend>rosidl_default_runtime</exec_depend>\n\n<depend>geometry_msgs</depend>\n<depend>std_msgs</depend>\n\n<member_of_group>rosidl_interface_packages</member_of_group>\n

      We need to replace <buildtool_depend>ament_cmake</buildtool_depend> with <buildtool_depend>ament_cmake_auto</buildtool_depend> in the package.xml file.

    5. Build the custom message package

      You can build the package in the root of your workspace, for example by running the following command:

      colcon build --packages-select autoware_sensing_msgs\n

      Now the GnssInsOrientationStamped message will be discoverable by other packages in Autoware.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/#how-to-use-custom-messages-in-autoware","title":"How to use custom messages in Autoware","text":"

    You can use the custom messages in Autoware by following these steps:

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/","title":"Advanced usage of colcon","text":""},{"location":"how-to-guides/others/advanced-usage-of-colcon/#advanced-usage-of-colcon","title":"Advanced usage of colcon","text":"

    This page shows some advanced and useful usage of colcon. If you need more detailed information, refer to the colcon documentation.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#common-mistakes","title":"Common mistakes","text":""},{"location":"how-to-guides/others/advanced-usage-of-colcon/#do-not-run-from-other-than-the-workspace-root","title":"Do not run from other than the workspace root","text":"

    It is important that you always run colcon build from the workspace root because colcon builds only under the current directory. If you have mistakenly built in a wrong directory, run rm -rf build/ install/ log/ to clean the generated files.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#do-not-unnecessarily-overlay-workspaces","title":"Do not unnecessarily overlay workspaces","text":"

    colcon overlays workspaces if you have sourced the setup.bash of other workspaces before building a workspace. You should take care of this especially when you have multiple workspaces.

    Run echo $COLCON_PREFIX_PATH to check whether workspaces are overlaid. If you find some workspaces are unnecessarily overlaid, remove all built files, restart the terminal to clean environment variables, and re-build the workspace.

    For more details about workspace overlaying, refer to the ROS 2 documentation.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#cleaning-up-the-build-artifacts","title":"Cleaning up the build artifacts","text":"

    colcon sometimes causes errors because of the old cache. To remove the cache and rebuild the workspace, run the following command:

    rm -rf build/ install/\n

    In case you know what packages to remove:

    rm -rf {build,install}/{package_a,package_b}\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#selecting-packages-to-build","title":"Selecting packages to build","text":"

    To just build specified packages:

    colcon build --packages-select <package_name1> <package_name2> ...\n

    To build specified packages and their dependencies recursively:

    colcon build --packages-up-to <package_name1> <package_name2> ...\n

    You can also use these options for colcon test.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#changing-the-optimization-level","title":"Changing the optimization level","text":"

    Set -DCMAKE_BUILD_TYPE to change the optimization level.

    Warning

    If you specify -DCMAKE_BUILD_TYPE=Debug or no -DCMAKE_BUILD_TYPE is given for building the entire Autoware, it may be too slow to use.

    colcon build --cmake-args -DCMAKE_BUILD_TYPE=Debug\n
    colcon build --cmake-args -DCMAKE_BUILD_TYPE=RelWithDebInfo\n
    colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#changing-the-default-configuration-of-colcon","title":"Changing the default configuration of colcon","text":"

    Create $COLCON_HOME/defaults.yaml to change the default configuration.

    mkdir -p ~/.colcon\ncat << EOS > ~/.colcon/defaults.yaml\n{\n\"build\": {\n\"symlink-install\": true\n}\n}\n

    For more details, see here.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#generating-compile_commandsjson","title":"Generating compile_commands.json","text":"

    compile_commands.json is used by IDEs/tools to analyze the build dependencies and symbol relationships.

    You can generate it with the flag -DCMAKE_EXPORT_COMPILE_COMMANDS=1:

    colcon build --cmake-args -DCMAKE_EXPORT_COMPILE_COMMANDS=1\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#seeing-compiler-commands","title":"Seeing compiler commands","text":"

    To see the compiler and linker invocations for a package, use VERBOSE=1 and --event-handlers console_cohesion+:

    VERBOSE=1 colcon build --packages-up-to <package_name> --event-handlers console_cohesion+\n

    For other options, see here.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#using-ccache","title":"Using Ccache","text":"

    Ccache can speed up recompilation. It is recommended to use it to save time unless you have a specific reason not to do so.

    1. Install Ccache:

      sudo apt update && sudo apt install ccache\n
    2. Write the following in your .bashrc:

      export CC=\"/usr/lib/ccache/gcc\"\nexport CXX=\"/usr/lib/ccache/g++\"\n
    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/","title":"An example procedure for adding and evaluating a new node","text":""},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#an-example-procedure-for-adding-and-evaluating-a-new-node","title":"An example procedure for adding and evaluating a new node","text":""},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#overview","title":"Overview","text":"

    This page provides a guide for evaluating Autoware when a new node is implemented, especially about developing a novel localization node.

    The workflow involves initial testing and rosbag recording using a real vehicle or AWSIM, implementing the new node, subsequent testing using the recorded rosbag, and finally evaluating with a real vehicle or AWSIM.

    It is assumed that the method intended for addition has already been verified well with public datasets and so on.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#1-running-autoware-in-its-standard-configuration","title":"1. Running Autoware in its standard configuration","text":"

    First of all, it is important to be able to run the standard Autoware to establish a basis for performance and behavior comparison.

    Autoware constantly incorporates new features. It is crucial to initially confirm that it operates as expected with the current version, which helps in problem troubleshooting.

    In this context, AWSIM is presumed. Therefore, AWSIM simulator can be useful. If you are using actual hardware, please refer to the How-to guides.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#2-recording-a-rosbag-using-autoware","title":"2. Recording a rosbag using Autoware","text":"

    Before developing a new node, it is recommended to record a rosbag in order to evaluate. If you need a new sensor, you should add it to your vehicle or AWSIM.

    In this case, it is recommended to save all topics regardless of whether they are necessary or not. For example, in Localization, since the initial position estimation service is triggered by the input to rviz and the GNSS topic, the initial position estimation does not start when playing back data unless those topics are saved.

    Consider the use of the mcap format if data capacity becomes a concern.

    It is worth noting that using ros2 bag record increases computational load and might affect performance. After data recording, verifying the smooth flow of sensor data and unchanged time series is advised. This verification can be accomplished, for example, by inspecting the image data with rqt_image_view during ros2 bag play.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#3-developing-the-new-node","title":"3. Developing the new node","text":"

    When developing a new node, it could be beneficial to reference a package that is similar to the one you intend to create.

    It is advisable to thoroughly read the Design page, contemplate the addition or replacement of nodes in Autoware, and then implement your solution.

    For example, a node doing NDT, a LiDAR-based localization method, is ndt_scan_matcher. If you want to replace this with a different approach, implement a node which produces the same topics and provides the same services.

    ndt_scan_matcher is launched as pose_estimator, so it is necessary to replace the launch file as well.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#4-evaluating-by-a-rosbag-based-simulator","title":"4. Evaluating by a rosbag-based simulator","text":"

    Once the new node is implemented, it is time to evaluate it. logging_simulator is a tool of how to evaluate the new node using the rosbag captured in step 2.

    When you run the logging_simulator, you can set planning:=false or control:=false to disable the launch of specific component nodes.

    ros2 launch autoware_launch logging_simulator.launch.xml ... planning:=false control:=false

    After launching logging_simulator, the rosbag file obtained in step 2 should be replayed using ros2 bag play <rosbag_file>.

    If you remap the topics related to the localization that you want to verify this time, Autoware will use the data it is calculating this time instead of the data it recorded. Also, using the --topics option of ros2 bag play, you can publish only specific topics in rosbag.

    There is ros2bag_extensions available to filter the rosbag file and create a new rosbag file that contains only the topics you need.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#5-evaluating-in-a-realtime-environment","title":"5. Evaluating in a realtime environment","text":"

    Once you have sufficiently verified the behavior in the logging_simulator, let's run it as Autoware with new nodes added in the realtime environment.

    To debug Autoware, the method described at debug-autoware is useful.

    For reproducibility, you may want to fix the GoalPose. In such cases, consider using the tier4_automatic_goal_rviz_plugin.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#6-sharing-the-results","title":"6. Sharing the results","text":"

    If your implementation works successfully, please consider a pull request to Autoware.

    It is also a good idea to start by presenting your ideas in Discussion at Show and tell.

    For localization, YabLoc's Proposal may provide valuable insights.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/","title":"Applying Clang-Tidy to ROS packages","text":""},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#applying-clang-tidy-to-ros-packages","title":"Applying Clang-Tidy to ROS packages","text":"

    Clang-Tidy is a powerful C++ linter.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#preparation","title":"Preparation","text":"

    You need to generate build/compile_commands.json before using Clang-Tidy.

    colcon build --cmake-args -DCMAKE_EXPORT_COMPILE_COMMANDS=1\n
    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#usage","title":"Usage","text":"
    clang-tidy -p build/ path/to/file1 path/to/file2 ...\n

    If you want to apply Clang-Tidy to all files in a package, using the fd command is useful. To install fd, see the installation manual.

    clang-tidy -p build/ $(fd -e cpp -e hpp --full-path \"/autoware_utils/\")\n
    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#ide-integration","title":"IDE integration","text":""},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#clion","title":"CLion","text":"

    Refer to the CLion Documentation.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#visual-studio-code","title":"Visual Studio Code","text":"

    Use either one of the following extensions:

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#troubleshooting","title":"Troubleshooting","text":"

    If you encounter clang-diagnostic-error, try installing libomp-dev.

    Related: https://github.com/autowarefoundation/autoware-github-actions/pull/172

    "},{"location":"how-to-guides/others/debug-autoware/","title":"Debug Autoware","text":""},{"location":"how-to-guides/others/debug-autoware/#debug-autoware","title":"Debug Autoware","text":"

    This page provides some methods for debugging Autoware.

    "},{"location":"how-to-guides/others/debug-autoware/#print-debug-messages","title":"Print debug messages","text":"

    The essential thing for debugging is to print program information clearly, which helps you quickly judge the program's operation and locate the problem. Autoware uses the ROS 2 logging tool to print debug messages; for how to design console logging, refer to the tutorial Console logging.

    "},{"location":"how-to-guides/others/debug-autoware/#using-ros-tools-debug-autoware","title":"Using ROS tools debug Autoware","text":""},{"location":"how-to-guides/others/debug-autoware/#using-command-line-tools","title":"Using command line tools","text":"

    ROS 2 includes a suite of command-line tools for introspecting a ROS 2 system. The main entry point for the tools is the command ros2, which itself has various sub-commands for introspecting and working with nodes, topics, services, and more. For how to use the ROS 2 command line tools, refer to the tutorial CLI tools.

    "},{"location":"how-to-guides/others/debug-autoware/#using-rviz2","title":"Using rviz2","text":"

    Rviz2 is a port of Rviz to ROS 2. It provides a graphical interface for users to view their robot, sensor data, maps, and more. You can run Rviz2 tool easily by:

    rviz2\n

    When Autoware launches the simulators, the Rviz2 tool is opened by default to visualize the autopilot graphic information.

    "},{"location":"how-to-guides/others/debug-autoware/#using-rqt-tools","title":"Using rqt tools","text":"

    RQt is a graphical user interface framework that implements various tools and interfaces in the form of plugins. You can run any RQt tools/plugins easily by:

    rqt\n

    This GUI allows you to choose any available plugins on your system. You can also run plugins in standalone windows. For example, RQt Console:

    ros2 run rqt_console rqt_console\n
    "},{"location":"how-to-guides/others/debug-autoware/#common-rqt-tools","title":"Common RQt tools","text":"
    1. rqt_graph: view node interaction

      In complex applications, it may be helpful to get a visual representation of the ROS node interactions.

      ros2 run rqt_graph rqt_graph\n
    2. rqt_console: view messages

      rqt_console is a great gui for viewing ROS topics.

      ros2 run rqt_console rqt_console\n
    3. rqt_plot: view data plots

      rqt_plot is an easy way to plot ROS data in real time.

      ros2 run rqt_plot rqt_plot\n
    "},{"location":"how-to-guides/others/debug-autoware/#using-ros2_graph","title":"Using ros2_graph","text":"

    ros2_graph can be used to generate mermaid description of ROS 2 graphs to add on your markdown files.

    It can also be used as a colorful alternative to rqt_graph even though it would require some tool to render the generated mermaid diagram.

    It can be installed with:

    pip install ros2-graph\n

    Then you can generate a mermaid description of the graph with:

    ros2_graph your_node\n\n# or like with an output file\nros2_graph /turtlesim -o turtle_diagram.md\n\n# or multiple nodes\nros2_graph /turtlesim /teleop_turtle\n

    You can then visualize these graphs with:

    "},{"location":"how-to-guides/others/debug-autoware/#using-ros2doctor","title":"Using ros2doctor","text":"

    When your ROS 2 setup is not running as expected, you can check its settings with the ros2doctor tool.

    ros2doctor checks all aspects of ROS 2, including platform, version, network, environment, running systems and more, and warns you about possible errors and reasons for issues.

    It's as simple as just running ros2 doctor in your terminal.

    It has the ability to list \"Subscribers without publishers\" for all topics in the system.

    And this information can help you find if a necessary node isn't running.

    For more details, see the following official documentation for Using ros2doctor to identify issues.

    "},{"location":"how-to-guides/others/debug-autoware/#using-a-debugger-with-breakpoints","title":"Using a debugger with breakpoints","text":"

    Many IDEs (e.g. Visual Studio Code, CLion) support debugging C/C++ executables with GDB on the Linux platform. The following lists some references for using the debugger:

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/","title":"Defining temporal performance metrics on components","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#defining-temporal-performance-metrics-on-components","title":"Defining temporal performance metrics on components","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#motivation-to-defining-temporal-performance-metrics","title":"Motivation to defining temporal performance metrics","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#objective-of-the-page","title":"Objective of the page","text":"

    This page introduces policies to define metrics to evaluate temporal performance on components of Autoware. The term \"temporal performance\" is often used throughout the page in order to distinguish between functional performance, which is also referred to as accuracy, and time-related performance.

    It is expected that most algorithms employed for Autoware are executed with as high frequency and short response time as possible. In order to achieve safe autonomous driving, one of the desired outcomes is no time gap between perceived and actual situation. The time gap is commonly referred to as delay. If the delay is significant, the system may determine trajectory and maneuver based on outdated situation. Consequently, if the actual situation differs from the perceived one due to the delay, the system may make unexpected decisions.

    As mentioned above, this page presents the policies to define metrics. Besides, the page contains lists of sample metrics that are crucial for the main functionalities of Autoware: Localization, Perception, Planning, and Control.

    Note

    Other functionalities, such as system components for diagnosis, are currently excluded. However, they will be taken into account in the near future.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#contribution-of-the-temporal-performance-metrics","title":"Contribution of the temporal performance metrics","text":"

    Temporal performance metrics are important for evaluating Autoware. These metrics are particularly useful for assessing delays caused by new algorithms and logic. They can be employed when comparing the temporal performance of software on a desktop computer with that on a vehicle during the vehicle integration phase.

    In addition, these metrics are useful for designers and evaluators of middleware, operating systems, and computers. They are selected based on user and product requirements. One of these requirements is to provide sufficient temporal performance for executing Autoware. \"Sufficient temporal performance\" is defined as a temporal performance requirement, but it can be challenging to define the requirement because it varies depending on the product type, Operational Design Domain (ODD), and other factors. Then, this page specifically focuses on temporal performance metrics rather than requirements.

    Temporal performance metrics are important for evaluating the reliability of Autoware. However, ensuring the reliability of Autoware requires consideration of not only temporal performance metrics but also other metrics.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#tools-for-evaluating-the-metrics","title":"Tools for evaluating the metrics","text":"

    There are several tools available for evaluating Autoware according to the metrics listed in the page. For example, both CARET and ros2_tracing are recommended options when evaluating Autoware on Linux and ROS 2. If you want to measure the metrics with either of these tools, refer to the corresponding user guide for instructions. It's important to note that if you import Autoware to a platform other than Linux and ROS 2, you will need to choose a supported tool for evaluation.

    Note

    TIER IV plans to measure Autoware, which is running according to the tutorial, and provide a performance evaluation report periodically. An example of such a report can be found here, although it may not include all of the metrics listed.

    The page does not aim to provide instructions on how to use these tools or measure the metrics. Its primary focus is on the metrics themselves, as they are more important than the specific tools used. These metrics retain their relevance regardless of the employed platform.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#policies-to-define-temporal-performance-metrics","title":"Policies to define temporal performance metrics","text":"

    As mentioned above, the configuration of Autoware varies by the product type, ODD, and other factors. The variety of configurations makes it difficult to define uniform metrics for evaluating Autoware. However, the policies used to define them are basically reused even when the configuration changes. Each temporal performance metric is categorized into one of two types: execution frequency and response time. Although there are many types of metrics, such as communication latency, only these two types are considered for simplicity. Execution frequency is observed using the rate of Inter-Process Communication (IPC) messages. You will find an enormous number of messages in Autoware, but you don't have to consider all of them. Some messages might be critical to functionality and they should be chosen for evaluation. Response time is the duration elapsed through a series of processing. A series of processing is referred to as a path. Response time is calculated from the timestamps of the start and end of a path. Although many paths can be defined in Autoware, you have to choose significant paths.

    As a hint, here are some characteristics of message and path in order to choose metrics.

    1. Messages and paths on boundaries where observed values from sensors are consumed
    2. Messages and paths on boundaries of functions, e.g., a boundary of perception and planning
    3. Messages and paths on boundaries where timer-based frequency is switched
    4. Messages and paths on boundaries where two different messages are synchronized and merged
    5. Messages that must be transmitted at expected frequency, e.g., vehicle command messages

    Those hints would be helpful for most configurations but there may be exclusions. Defining metrics precisely requires an understanding of configuration.

    In addition, it is recommended that metrics be determined incrementally from the architectural level to the detailed design and implementation level. Mixing metrics at different levels of granularity can be confusing.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#list-of-sample-metrics","title":"List of sample metrics","text":"

    This section demonstrates how to define metrics according to the policies explained and has lists of the metrics for Autoware launched according to the tutorial. The section is divided into multiple subsections, each containing a model diagram and an accompanying list that explains the important temporal performance metrics. Each model is equipped with checkpoints that serve as indicators for these metrics.

    The first subsection presents the top-level temporal performance metrics, which are depicted in the abstract structure of Autoware as a whole. The detailed metrics are not included in the model as they would add complexity to it. Instead, the subsequent section introduces the detailed metrics. The detailed metrics are subject to more frequent updates compared to the top-level ones, which is another reason for categorizing them separately.

    Each list includes a column for the reference value. The reference value represents the observed value of each metric when Autoware is running according to the tutorial. It is important to note that the reference value is not a required value, meaning that Autoware does not necessarily fail in the tutorial execution if certain metrics do not fulfill the reference value.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#top-level-temporal-performance-metrics-for-autoware","title":"Top-level temporal performance metrics for Autoware","text":"

    The diagram below introduces the model for top-level temporal performance metrics.

    The following three policies assist in selecting the top-level performance metrics:

    Additionally, it is assumed that algorithms are implemented as multiple nodes and function as a pipeline processing system.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note AWOV-001 Message rate from CPA #9 to CPA #18 Update rate of result from Prediction to Planning. Perception 10 Hz Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. AWOV-002 Response time from CPA #0 to CPA #20 via CPA #18 Response time in main body of Perception. Perception N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is used if delay compensation is disabled in Tracking. AWOV-003 Response time from CPA #7 to CPA #20 Response time from Tracking output of Tracking to its data consumption in Planning. Perception N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is used if delay compensation is enabled in Tracking. AWOV-004 Response time from CPA #0 to CPA #6 Duration to process pointcloud data in Sensing and Detection. Perception N/A Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. The metric is used if delay compensation is enabled in Tracking. AWOV-005 Message rate from CPA #4 to CPA #5 Update rate of Detection result received by Tracking. Perception 10 Hz Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. AWOV-006 Response time from CPA #0 to CPA #14 Response time from output of observed data from LiDARs to its consumption in EKF Localizer via NDT Scan Matcher. Localization N/A EKF Localizer relies on fresh and up-to-date observed data from sensors for accurate estimation of self pose. AWOV-007 Message rate from CPA #11 to CPA #13 Update rate of pose estimated by NDT Scan Matcher. Localization 10 Hz EKF Localizer relies on fresh and up-to-date observed data from sensors for accurate estimation of self pose. 
AWOV-008 Message rate from CPA #15 to CPA #12 Update rate of feed backed pose estimated by EKF Localizer. Localization 50 Hz NDT Scan Matcher relies on receiving estimated pose from EKF Localizer smoothly for linear interpolation. AWOV-009 Message rate from CPA #17 to CPA #19 Update rate of Localization result received by Planning. Localization 50 Hz Planning relies on Localization to update the estimated pose frequently. AWOV-010 Response time from CPA #20 to CPA #23 Processing time from beginning of Planning to consumption of Trajectory message in Control. Planning N/A A vehicle relies on Planning to update trajectory within a short time frame to achieve safe driving behavior. AWOV-011 Message rate from CPA #21 to CPA #22 Update rate of Trajectory message from Planning. Planning 10 Hz A vehicle relies on Planning to update trajectory frequently to achieve safe driving behavior. AWOV-012 Message rate from CPA #24 to CPA #25 Update rate of Control command. Control 33 Hz Control stability and comfort relies on sampling frequency of Control. AWOV-013 Message rate between CPA #26 and Vehicle Communication rate between Autoware and Vehicle. Vehicle Interface N/A A vehicle requires Autoware to communicate with each other at predetermined frequency. Temporal performance requirement varies depending on vehicle type.

    Note

    There is an assumption that each sensor, such as LiDARs and cameras, outputs a set of pointcloud data with a timestamp. CPA #0 is observed with the timestamp. If the sensors are not configured to output the timestamp, the time when Autoware receives the pointcloud is used instead. That is represented by CPA #1 in the model. The detailed metrics employ the idea as well.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#detailed-temporal-performance-metrics-for-perception","title":"Detailed temporal performance metrics for Perception","text":"

    The diagram below introduces the model for temporal performance metrics for Perception.

    The following two policies assist in selecting the performance metrics:

    The following list shows the temporal performance metrics for Perception.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note APER-001 Message rate from CPP #2 to CPP #26 Update rate of Traffic Light Recognition. Traffic Light Recognition 10 Hz Planning relies on fresh and up-to-date perceived data from Traffic Light Recognition for making precise decisions. APER-002 Response time from CPP #0 to CPP #30 Response time from camera input to consumption of the result in Planning. Traffic Light Recognition N/A Planning relies on fresh and up-to-date perceived data from Traffic Light Recognition for making precise decisions. APER-003 Message rate from CPP #25 to CPP #28 Update rate of result from Prediction (Object Recognition) to Planning. Object Recognition 10 Hz Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-001. APER-004 Response time from CPP #6 to CPP #30 Response time from Tracking output of Tracking to its data consumption in Planning. Object Recognition N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-002 and used if delay compensation is disabled in Tracking. APER-005 Response time from CPP #23 to CPP #30 Response time from Tracking output of Tracking to its data consumption in Planning. Object Recognition N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-003 and used if delay compensation is enabled in Tracking. APER-006 Response time from CPP #6 to CPP #21 Duration to process pointcloud data in Sensing and Detection. Object Recognition N/A Tracking relies on Detection to provide real-time and up-to-date perceived data. The metrics is same as AWOV-004 and used if delay compensation is enabled in Tracking. APER-007 Message rate from CPP #20 to CPP #21 Update rate of Detection result received by Tracking. 
Object Recognition 10 Hz Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. The metric is same as AWOV-005 APER-008 Message rate from CPP #14 to CPP #19 Update rate of data sent from Sensor Fusion. Object Recognition 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-009 Message rate from CPP #16 to CPP #19 Update rate of data sent from Detection by Tracker. Object Recognition 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-010 Message rate from CPP #18 to CPP #19 Update rate of data sent from Validation Object Recognition. 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-011 Response time from CPP #6 to CPP #19 via CPP #14 Response time to consume data sent from Sensor Fusion after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-012 Response time from CPP #6 to CPP #19 via CPP #16 Response time to consume data sent from Detection by Tracker after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-013 Response time from CPP #6 to CPP #19 via CPP #18 Response time to consume data sent from Validator after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-014 Message rate from CPP #10 to CPP #13 Update rate of data sent from Clustering. Object Recognition 10 Hz Sensor Fusion relies on the data to be updated at expected frequency for data synchronization. APER-015 Message rate from CPP #5 to CPP #13 Update rate of data sent from Camera-based Object detection. Object Recognition 10 Hz Sensor Fusion relies on the data to be updated at expected frequency for data synchronization. 
APER-016 Response time from CPP #6 to CPP #13 Response time to consume data sent from Clustering after LiDARs output pointcloud. Object Recognition N/A Sensor Fusion relies on fresh and up-to-date data for data synchronization. APER-017 Response time from CPP #3 to CPP #13 Response time to consume data sent from Camera-based Object detection after Cameras output images. Object Recognition N/A Sensor Fusion relies on fresh and up-to-date data for data synchronization. APER-018 Message rate from CPP #10 to CPP #17 Update rate of data sent from Clustering. Object Recognition 10 Hz Validator relies on the data to be updated at expected frequency for data synchronization. It seems similar to APER-014, but the topic message is different. APER-019 Message rate from CPP #12 to CPP #17 Update rate of data sent from DNN-based Object Recognition. Object Recognition 10 Hz Validator relies on the data to be updated at expected frequency for data synchronization. APER-020 Response time from CPP #6 to CPP #17 via CPP #10 Response time to consume data sent from Clustering after LiDARs output pointcloud. Object Recognition N/A Validator relies on fresh and update-date data for data synchronization. It seems similar to APER-015, but the topic message is different. APER-021 Response time from CPP #6 to CPP #17 via CPP #12 Response time to consume data sent from DNN-based Object Recognition after LiDARs output pointcloud. Object Recognition N/A Validator relies on fresh and update-date data for data synchronization."},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#detailed-temporal-performance-metrics-for-paths-between-obstacle-segmentation-and-planning","title":"Detailed temporal performance metrics for Paths between Obstacle segmentation and Planning","text":"

    Obstacle segmentation, which is a crucial part of Perception, transmits data to Planning. The figure below illustrates the model that takes into account performance metrics related to Obstacle segmentation and Planning.

    Note

    Both the Obstacle grid map and Obstacle segmentation transmit data to multiple sub-components of Planning. However, not all of these sub-components are described in the model. This is because our primary focus is on the paths from LiDAR to Planning via Obstacle segmentation.

    The following list shows the temporal performance metrics around Obstacle segmentation and Planning.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note OSEG-001 Message rate from CPS #4 to CPS #7 Update rate of Occupancy grid map received by Planning (behavior_path_planner) Obstacle segmentation 10 Hz Planning relies on Occupancy grid map to be updated frequently and smoothly for creating accurate trajectory. OSEG-002 Response time from CPS #0 to CPS #9 via CPS #7 Response time to consume Occupancy grid map after LiDARs output sensing data. Obstacle segmentation N/A Planning relies on fresh and up-to-date perceived data from Occupancy grid map for creating accurate trajectory.. OSEG-003 Message rate from CPS #6 to CPS #11 Update rate of obstacle segmentation received by Planning (behavior_velocity_planner). Obstacle segmentation 10 Hz Planning relies on Obstacle segmentation to be updated frequently and smoothly for creating accurate trajectory. OSEG-004 Response time from CPS #0 to CPS #13 via CPS #11 Response time to consume Obstacle segmentation after LiDARs output sensing data. Obstacle segmentation N/A Planning relies on fresh and up-to-date perceived data from Obstacle segmentation for creating accurate trajectory.."},{"location":"how-to-guides/others/determining-component-dependencies/","title":"Determining component dependencies","text":""},{"location":"how-to-guides/others/determining-component-dependencies/#determining-component-dependencies","title":"Determining component dependencies","text":"

    For any developers who wish to try and deploy Autoware as a microservices architecture, it is necessary to understand the software dependencies, communication, and implemented features of each ROS package/node.

    As an example, the commands necessary to determine the dependencies for the Perception component are shown below.

    "},{"location":"how-to-guides/others/determining-component-dependencies/#perception-component-dependencies","title":"Perception component dependencies","text":"

    To generate a graph of package dependencies, use the following colcon command:

    colcon graph --dot --packages-up-to tier4_perception_launch | dot -Tpng -o graph.png\n

    To generate a list of dependencies, use:

    colcon list --packages-up-to tier4_perception_launch --names-only\n
    colcon list output
    autoware_auto_geometry_msgs\nautoware_auto_mapping_msgs\nautoware_auto_perception_msgs\nautoware_auto_planning_msgs\nautoware_auto_vehicle_msgs\nautoware_cmake\nautoware_lint_common\nautoware_point_types\ncompare_map_segmentation\ndetected_object_feature_remover\ndetected_object_validation\ndetection_by_tracker\neuclidean_cluster\ngrid_map_cmake_helpers\ngrid_map_core\ngrid_map_cv\ngrid_map_msgs\ngrid_map_pcl\ngrid_map_ros\nground_segmentation\nimage_projection_based_fusion\nimage_transport_decompressor\ninterpolation\nkalman_filter\nlanelet2_extension\nlidar_apollo_instance_segmentation\nmap_based_prediction\nmulti_object_tracker\nmussp\nobject_merger\nobject_range_splitter\noccupancy_grid_map_outlier_filter\npointcloud_preprocessor\npointcloud_to_laserscan\nshape_estimation\ntensorrt_yolo\ntier4_autoware_utils\ntier4_debug_msgs\ntier4_pcl_extensions\ntier4_perception_launch\ntier4_perception_msgs\ntraffic_light_classifier\ntraffic_light_map_based_detector\ntraffic_light_ssd_fine_detector\ntraffic_light_visualization\nvehicle_info_util\n

    Tip

    To output a list of modules with their respective paths, run the command above without the --names-only parameter.

    To see which ROS topics are being subscribed and published to, use rqt_graph as follows:

    ros2 launch tier4_perception_launch perception.launch.xml mode:=lidar\nros2 run rqt_graph rqt_graph\n
    "},{"location":"how-to-guides/others/fixing-dependent-package-versions/","title":"Fixing dependent package versions","text":""},{"location":"how-to-guides/others/fixing-dependent-package-versions/#fixing-dependent-package-versions","title":"Fixing dependent package versions","text":"

    Autoware manages dependent package versions in autoware.repos. For example, let's say you make a branch in autoware.universe and add new features. Suppose you update other dependencies with vcs pull after cutting a branch from autoware.universe. Then the version of autoware.universe you are developing and other dependencies will become inconsistent, and the entire Autoware build will fail. We recommend saving the dependent package versions by executing the following command when starting the development.

    vcs export src --exact > my_autoware.repos\n
    "},{"location":"how-to-guides/others/reducing-start-delays/","title":"Reducing start delays on real vehicles","text":""},{"location":"how-to-guides/others/reducing-start-delays/#reducing-start-delays-on-real-vehicles","title":"Reducing start delays on real vehicles","text":"

    In simulation, the ego vehicle reacts nearly instantly to the control commands generated by Autoware. However, with a real vehicle, some delays occur that may make the ego vehicle feel less responsive.

    This page presents start delays experienced when using Autoware on a real vehicle. We define the start delay as the time between (a) when Autoware decides to make the ego vehicle start and (b) when the vehicle actually starts moving. More precisely:

    "},{"location":"how-to-guides/others/reducing-start-delays/#start-delay-with-manual-driving","title":"Start delay with manual driving","text":"

    First, let us look at the start delay when a human is driving.

    The following figure shows the start delay when a human driver switches the gear from parked to drive and instantly releases the brake to push the throttle pedal and make the velocity of the vehicle increase.

    There are multiple things to note from this figure.

    "},{"location":"how-to-guides/others/reducing-start-delays/#filter-delay","title":"Filter delay","text":"

    To guarantee passenger comfort, some Autoware modules implement filters on the jerk of the vehicle, preventing sudden changes in acceleration.

    For example, the vehicle_cmd_gate filters the acceleration command generated by the controller and was previously introducing significant delays when transitioning between a stop command, where the acceleration is negative, and a move command, where the acceleration is positive. Because of the jerk filter, the transition between negative and positive was not instantaneous and would take several hundred milliseconds.

    "},{"location":"how-to-guides/others/reducing-start-delays/#gear-delay","title":"Gear delay","text":"

    In many vehicles, it is necessary to change gear before first starting to move the vehicle. When performed autonomously, this gear change can take some significant time. Moreover, as seen from the data recorded with manual driving, the measured gear value may be delayed.

    In Autoware, the controller sends a stopping control command until the gear is changed to the drive state. This means that delays in the gear change and its reported value can greatly impact the start delay. Note that this is only an issue when the vehicle is initially in the parked gear.

    The only way to reduce this delay is by tuning the vehicle to increase the gear change speed or to reduce the delay in the gear change report.

    "},{"location":"how-to-guides/others/reducing-start-delays/#brake-delay","title":"Brake delay","text":"

    In vehicles with a brake pedal, the braking system will often be made of several moving parts which cannot move instantly. Thus, when Autoware sends brake commands to a vehicle, some delays should be expected in the actual brake applied to the wheels.

    This lingering brake may prevent or delay the initial motion of the ego vehicle.

    This delay can be reduced by tuning the vehicle.

    "},{"location":"how-to-guides/others/reducing-start-delays/#throttle-response","title":"Throttle response","text":"

    For vehicles with throttle control, one of the main causes of start delays is the throttle response of the vehicle. When pushing the throttle pedal, the wheels of the vehicle do not instantly start rotating. This is partly due to the inertia of the vehicle, but also to the motor which may take a significant time to start applying some torque to the wheels.

    It may be possible to tune some vehicle side parameters to reduce this delay, but it is often done at the cost of reduced energy efficiency.

    On the Autoware side, the only way to decrease this delay is to increase the initial throttle but this can cause uncomfortably high initial accelerations.

    "},{"location":"how-to-guides/others/reducing-start-delays/#initial-acceleration-and-throttle","title":"Initial acceleration and throttle","text":"

    As we just discussed, for vehicles with throttle control, an increased initial throttle value can reduce the start delay.

    Since Autoware outputs an acceleration value, the conversion module raw_vehicle_cmd_converter is used to map the acceleration value from Autoware to a throttle value to be sent to the vehicle. Such mapping is usually calibrated automatically using the accel_brake_map_calibrator module, but it may produce a low initial throttle which leads to high start delays.

    In order to increase the initial throttle, there are two options: increase the initial acceleration output by Autoware, or modify the acceleration to throttle mapping.

    The initial acceleration output by Autoware can be tuned in the motion_velocity_smoother with parameters engage_velocity and engage_acceleration. However, the vehicle_cmd_gate applies a filter on the control command to prevent too sudden changes in jerk and acceleration, limiting the maximum allowed acceleration while the ego vehicle is stopped.

    Alternatively, the mapping of acceleration can be tuned to increase the throttle corresponding to the initial acceleration. If we look at an example acceleration map, it does the following conversion: when the ego velocity is 0 (first column), acceleration values between 0.631 (first row) and 0.836 (second row) are converted to a throttle between 0% and 10%. This means that any initial acceleration below 0.631m/s\u00b2 will not produce any throttle. Keep in mind that after tuning the acceleration map, it may be necessary to also update the brake map.

    default 0 1.39 2.78 4.17 5.56 6.94 8.33 9.72 11.11 12.5 13.89 0 0.631 0.11 -0.04 -0.04 -0.041 -0.096 -0.137 -0.178 -0.234 -0.322 -0.456 0.1 0.836 0.57 0.379 0.17 0.08 0.07 0.068 0.027 -0.03 -0.117 -0.251 0.2 1.129 0.863 0.672 0.542 0.4 0.38 0.361 0.32 0.263 0.176 0.042 0.3 1.559 1.293 1.102 0.972 0.887 0.832 0.791 0.75 0.694 0.606 0.472 0.4 2.176 1.909 1.718 1.588 1.503 1.448 1.408 1.367 1.31 1.222 1.089 0.5 3.027 2.76 2.57 2.439 2.354 2.299 2.259 2.218 2.161 2.074 1.94"},{"location":"how-to-guides/others/running-autoware-without-cuda/","title":"Running Autoware without CUDA","text":""},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-autoware-without-cuda","title":"Running Autoware without CUDA","text":"

    Although CUDA installation is recommended to achieve better performance for object detection and traffic light recognition in Autoware Universe, it is possible to run these algorithms without CUDA. The following subsections briefly explain how to run each algorithm in such an environment.

    "},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-2d3d-object-detection-without-cuda","title":"Running 2D/3D object detection without CUDA","text":"

    Autoware Universe's object detection can be run using one of five possible configurations:

    Of these five configurations, only the last one (euclidean_cluster) can be run without CUDA. For more details, refer to the euclidean_cluster module's README file.

    "},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-traffic-light-detection-without-cuda","title":"Running traffic light detection without CUDA","text":"

    For traffic light recognition (both detection and classification), there are two modules that require CUDA:

    To run traffic light detection without CUDA, set enable_fine_detection to false in the traffic light launch file. Doing so disables the traffic_light_ssd_fine_detector such that traffic light detection is handled by the map_based_traffic_light_detector module instead.

    To run traffic light classification without CUDA, set use_gpu to false in the traffic light classifier launch file. Doing so will force the traffic_light_classifier to use a different classification algorithm that does not require CUDA or a GPU.

    "},{"location":"how-to-guides/training-machine-learning-models/training-models/","title":"Training and Deploying Models","text":""},{"location":"how-to-guides/training-machine-learning-models/training-models/#training-and-deploying-models","title":"Training and Deploying Models","text":""},{"location":"how-to-guides/training-machine-learning-models/training-models/#overview","title":"Overview","text":"

    The Autoware offers a comprehensive array of machine learning models, tailored for a wide range of tasks including 2D and 3D object detection, traffic light recognition and more. These models have been meticulously trained utilizing open-mmlab's extensive repositories. By leveraging the provided scripts and following the training steps, you have the capability to train these models using your own dataset, tailoring them to your specific needs.

    Furthermore, you will find the essential conversion scripts to deploy your trained models into Autoware using the mmdeploy repository.

    "},{"location":"how-to-guides/training-machine-learning-models/training-models/#training-traffic-light-classifier-model","title":"Training traffic light classifier model","text":"

    The traffic light classifier model within the Autoware has been trained using the mmlab/pretrained repository. The Autoware offers pretrained models based on EfficientNet-b1 and MobileNet-v2 architectures. To fine-tune these models, a total of 83,400 images were employed, comprising 58,600 for training, 14,800 for evaluation, and 10,000 for testing. These images represent Japanese traffic lights and were trained using TIER IV's internal dataset.

    Name Input Size Test Accuracy EfficientNet-b1 128 x 128 99.76% MobileNet-v2 224 x 224 99.81%

    Comprehensive training instructions for the traffic light classifier model are detailed within the readme file accompanying \"traffic_light_classifier\" package. These instructions will guide you through the process of training the model using your own dataset. To facilitate your training, we have also provided an example dataset containing three distinct classes (green, yellow, red), which you can leverage during the training process.

    Detailed instructions for training the traffic light classifier model can be found here.

    "},{"location":"installation/","title":"Installation","text":""},{"location":"installation/#installation","title":"Installation","text":""},{"location":"installation/#target-platforms","title":"Target platforms","text":"

    Autoware targets the platforms listed below. It may change in future versions of Autoware.

    The Autoware Foundation provides no support for platforms other than those listed below.

    "},{"location":"installation/#architecture","title":"Architecture","text":""},{"location":"installation/#minimum-hardware-requirements","title":"Minimum hardware requirements","text":"

    Info

    Autoware is scalable and can be customized to work with distributed or less powerful hardware. The minimum hardware requirements given below are just a general recommendation. However, performance will be improved with more cores, RAM and a higher-spec graphics card or GPU core.

    Although GPU is not required to run basic functionality, it is mandatory to enable the following neural network related functions:

    For details of how to enable object detection and traffic light detection/classification without a GPU, refer to the Running Autoware without CUDA.

    "},{"location":"installation/#installing-autoware","title":"Installing Autoware","text":"

    There are two ways to set up Autoware. Choose one according to your preference.

    If any issues occur during installation, refer to the Support page.

    "},{"location":"installation/#1-docker-installation","title":"1. Docker installation","text":"

    Docker can ensure that all developers in a project have a common, consistent development environment. It is recommended for beginners, casual users, and people who are unfamiliar with Ubuntu.

    For more information, refer to the Docker installation guide.

    "},{"location":"installation/#2-source-installation","title":"2. Source installation","text":"

    Source installation is for the cases where more granular control of the installation environment is needed. It is recommended for experienced users or people who want to customize their environment. Note that some problems may occur depending on your local environment.

    For more information, refer to the source installation guide.

    "},{"location":"installation/#installing-related-tools","title":"Installing related tools","text":"

    Some other tools are required depending on the evaluation you want to do. For example, to run an end-to-end simulation you need to install an appropriate simulator.

    For more information, see here.

    "},{"location":"installation/#additional-settings-for-developers","title":"Additional settings for developers","text":"

    There are also tools and settings for developers, such as Shells or IDEs.

    For more information, see here.

    "},{"location":"installation/additional-settings-for-developers/","title":"Additional settings for developers","text":""},{"location":"installation/additional-settings-for-developers/#additional-settings-for-developers","title":"Additional settings for developers","text":""},{"location":"installation/additional-settings-for-developers/#console-settings-for-ros-2","title":"Console settings for ROS 2","text":""},{"location":"installation/additional-settings-for-developers/#colorizing-logger-output","title":"Colorizing logger output","text":"

    By default, ROS 2 logger doesn't colorize the output. To colorize it, write the following in your .bashrc:

    export RCUTILS_COLORIZED_OUTPUT=1\n
    "},{"location":"installation/additional-settings-for-developers/#customizing-the-format-of-logger-output","title":"Customizing the format of logger output","text":"

    By default, ROS 2 logger doesn't output detailed information such as file name, function name, or line number. To customize it, write the following in your .bashrc:

    export RCUTILS_CONSOLE_OUTPUT_FORMAT=\"[{severity} {time}] [{name}]: {message} ({function_name}() at {file_name}:{line_number})\"\n

    For more options, see here.

    "},{"location":"installation/additional-settings-for-developers/#network-settings-for-ros-2","title":"Network settings for ROS 2","text":"

    ROS 2 employs DDS, and the configuration of ROS 2 and DDS is described separately. For ROS 2 networking concepts, refer to the official documentation.

    "},{"location":"installation/additional-settings-for-developers/#ros-2-network-setting","title":"ROS 2 network setting","text":"

    ROS 2 multicasts data on the local network by default. Therefore, when you develop in an office, the data flows over the local network of your office. It may cause collisions of packets or increases in network traffic.

    To avoid these, there are two options.

    Unless you plan to use multiple host computers on the local network, localhost-only communication is recommended. For details, refer to the sections below.

    "},{"location":"installation/additional-settings-for-developers/#enabling-localhost-only-communication","title":"Enabling localhost-only communication","text":"

    Write the following in your .bashrc: For more information, see the ROS 2 documentation.

    export ROS_LOCALHOST_ONLY=1\n

    If you export ROS_LOCALHOST_ONLY=1, MULTICAST must be enabled at the loopback address. To verify that MULTICAST is enabled, use the following command.

    $ ip link show lo\n1: lo: <LOOPBACK,MULTICAST,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN mode DEFAULT group default qlen 1000\n

    If the word MULTICAST is not printed, use the following command to enable it.

    sudo ip link set lo multicast on\n
    "},{"location":"installation/additional-settings-for-developers/#same-domain-only-communication-on-the-local-network","title":"Same domain only communication on the local network","text":"

    ROS 2 uses ROS_DOMAIN_ID to create groups and communicate between machines in the groups. Since all ROS 2 nodes use domain ID 0 by default, it may cause unintended interference.

    To avoid it, set a different domain ID for each group in your .bashrc:

    # Replace X with the Domain ID you want to use\n# Domain ID should be a number in range [0, 101] (inclusive)\nexport ROS_DOMAIN_ID=X\n

    Also confirm that ROS_LOCALHOST_ONLY is 0 by using the following command.

    echo $ROS_LOCALHOST_ONLY # If the output is 1, localhost has priority.\n

    For more information, see the ROS 2 Documentation.

    "},{"location":"installation/additional-settings-for-developers/#dds-settings","title":"DDS settings","text":"

    Autoware uses DDS for inter-node communication. The ROS 2 documentation recommends that users tune DDS to utilize its capabilities. In particular, the receive buffer size is a critical parameter for Autoware. If the parameter is not large enough, Autoware will fail to receive large data such as point clouds or images.

    "},{"location":"installation/additional-settings-for-developers/#tuning-dds","title":"Tuning DDS","text":"

    Unless customized, CycloneDDS is adopted by default. For example, to execute Autoware with CycloneDDS, prepare a config file. A sample config file is given below. Save it as cyclonedds_config.xml.

    <?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<CycloneDDS xmlns=\"https://cdds.io/config\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"https://cdds.io/config https://raw.githubusercontent.com/eclipse-cyclonedds/cyclonedds/master/etc/cyclonedds.xsd\">\n<Domain Id=\"any\">\n<General>\n<Interfaces>\n<NetworkInterface autodetermine=\"true\" priority=\"default\" multicast=\"default\" />\n</Interfaces>\n<AllowMulticast>default</AllowMulticast>\n<MaxMessageSize>65500B</MaxMessageSize>\n</General>\n<Internal>\n<SocketReceiveBufferSize min=\"10MB\"/>\n<Watermarks>\n<WhcHigh>500kB</WhcHigh>\n</Watermarks>\n</Internal>\n</Domain>\n</CycloneDDS>\n

    This configuration is mostly taken from Eclipse Cyclone DDS:Run-time configuration documentation. You can see why each value is set as such under the documentation link.

    Set the config file path and enlarge the Linux kernel maximum buffer size before launching Autoware.

    export CYCLONEDDS_URI=file:///absolute/path/to/cyclonedds_config.xml\nsudo sysctl -w net.core.rmem_max=2147483647\n

    For more information, refer to the ROS 2 documentation. Reading the user guide for your chosen DDS is helpful for a deeper understanding.

    "},{"location":"installation/additional-settings-for-developers/#tuning-dds-for-multiple-host-computers-for-advanced-users","title":"Tuning DDS for multiple host computers (for advanced users)","text":"

    When Autoware runs on multiple host computers, IP Fragmentation should be taken into account. As ROS 2 documentation recommends, parameters for IP Fragmentation should be set as shown in the following example.

    sudo sysctl -w net.ipv4.ipfrag_time=3\nsudo sysctl -w net.ipv4.ipfrag_high_thresh=134217728     # (128 MB)\n
    "},{"location":"installation/autoware/docker-installation-devel/","title":"Docker installation for development","text":""},{"location":"installation/autoware/docker-installation-devel/#docker-installation-for-development","title":"Docker installation for development","text":""},{"location":"installation/autoware/docker-installation-devel/#prerequisites","title":"Prerequisites","text":" "},{"location":"installation/autoware/docker-installation-devel/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Clone autowarefoundation/autoware and move to the directory.

      git clone https://github.com/autowarefoundation/autoware.git\ncd autoware\n
    2. You can install the dependencies either manually or using the provided Ansible script.

    Note: Before installing NVIDIA libraries, confirm and agree with the licenses.

    "},{"location":"installation/autoware/docker-installation-devel/#installing-dependencies-manually","title":"Installing dependencies manually","text":""},{"location":"installation/autoware/docker-installation-devel/#installing-dependencies-using-ansible","title":"Installing dependencies using Ansible","text":"

    Be very careful with this method. Make sure you read and confirmed all the steps in the Ansible configuration before using it.

    If you've manually installed the dependencies, you can skip this section.

    ./setup-dev-env.sh docker\n

    You might need to log out and log back in so that the current user is able to use Docker.

    "},{"location":"installation/autoware/docker-installation-devel/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"

    Warning

    Before proceeding, confirm and agree with the NVIDIA Deep Learning Container license. By pulling and using the Autoware Universe images, you accept the terms and conditions of the license.

    1. Create the autoware_map directory for map data later.

      mkdir ~/autoware_map\n
    2. Pull the Docker image

      docker pull ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    3. Launch a Docker container.

      • For amd64 architecture computers with NVIDIA GPU:

        rocker --nvidia --x11 --user --volume $HOME/autoware --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
      • If you want to run container without using NVIDIA GPU, or for arm64 architecture computers:

        rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n

        The detailed reason can be found here

      For more advanced usage, see here.

      After that, move to the workspace in the container:

      cd autoware\n
    4. Create the src directory and clone repositories into it.

      mkdir src\nvcs import src < autoware.repos\n
    5. Update dependent ROS packages.

      The dependency of Autoware may change after the Docker image was created. In that case, you need to run the following commands to update the dependency.

      sudo apt update\nrosdep update\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    6. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

      If there is any build issue, refer to Troubleshooting.

    "},{"location":"installation/autoware/docker-installation-devel/#how-to-update-a-workspace","title":"How to update a workspace","text":"
    1. Update the Docker image.

      docker pull ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    2. Launch a Docker container.

      • For amd64 architecture computers:

        rocker --nvidia --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
      • If you want to run container without using NVIDIA GPU, or for arm64 architecture computers:

        rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    3. Update the .repos file.

      cd autoware\ngit pull\n
    4. Update the repositories.

      vcs import src < autoware.repos\nvcs pull src\n
    5. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"installation/autoware/docker-installation-prebuilt/","title":"Docker installation for quick start","text":""},{"location":"installation/autoware/docker-installation-prebuilt/#docker-installation-for-quick-start","title":"Docker installation for quick start","text":""},{"location":"installation/autoware/docker-installation-prebuilt/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Installing dependencies manually

      • Install Docker Engine
      • Install NVIDIA Container Toolkit
      • Install rocker
    "},{"location":"installation/autoware/docker-installation-prebuilt/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"
    1. Create the autoware_map directory for map data later.

      mkdir ~/autoware_map\n
    2. Launch a Docker container.

      rocker --nvidia --x11 --user --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:humble-latest-prebuilt\n

      For more advanced usage, see here.

    3. Run Autoware simulator

      Inside the container, you can run the Autoware simulation by following this tutorial:

      planning simulation

      rosbag replay simulation.

    "},{"location":"installation/autoware/docker-installation/","title":"Docker installation","text":""},{"location":"installation/autoware/docker-installation/#docker-installation","title":"Docker installation","text":"

    Info

    Since this page explains Docker-specific information, it is recommended to see Source installation as well if you need detailed information.

    Here are two ways to install Autoware using Docker:

    "},{"location":"installation/autoware/docker-installation/#docker-installation-for-quick-start","title":"Docker installation for quick start","text":"

    docker installation for quick start

    "},{"location":"installation/autoware/docker-installation/#docker-installation-for-development","title":"Docker installation for development","text":"

    docker installation for development

    "},{"location":"installation/autoware/docker-installation/#troubleshooting","title":"Troubleshooting","text":"

    Here are solutions for a few specific errors:

    "},{"location":"installation/autoware/docker-installation/#cuda-error-forward-compatibility-was-attempted-on-non-supported-hw","title":"cuda error: forward compatibility was attempted on non supported hw","text":"

    When starting Docker with GPU support enabled for NVIDIA graphics, you may sometimes receive the following error:

    docker: Error response from daemon: OCI runtime create failed: container_linux.go:349: starting container process caused \"process_linux.go:449: container init caused \\\"process_linux.go:432: running prestart hook 0 caused \\\\\\\"error running hook: exit status 1, stdout: , stderr: nvidia-container-cli: initialization error: cuda error: forward compatibility was attempted on non supported hw\\\\\\\\n\\\\\\\"\\\"\": unknown.\nERROR: Command return non-zero exit code (see above): 125\n

    This usually indicates that a new NVIDIA graphics driver has been installed (usually via apt) but the system has not yet been restarted. A similar message may appear if the graphics driver is not available, for example because of resuming after suspend.

    To fix this, restart your system after installing the new NVIDIA driver.

    "},{"location":"installation/autoware/docker-installation/#docker-with-nvidia-gpu-fails-to-start-autoware-on-arm64-devices","title":"Docker with NVIDIA gpu fails to start Autoware on arm64 devices","text":"

    When starting Docker with GPU support enabled for NVIDIA graphics on arm64 devices, e.g. NVIDIA jetson AGX xavier, you may receive the following error:

    nvidia@xavier:~$ rocker --nvidia --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda-arm64\n...\n\nCollecting staticx==0.12.3\nDownloading https://files.pythonhosted.org/packages/92/ff/d9960ea1f9db48d6044a24ee0f3d78d07bcaddf96eb0c0e8806f941fb7d3/staticx-0.12.3.tar.gz (68kB)\nComplete output from command python setup.py egg_info:\nTraceback (most recent call last):\nFile \"\", line 1, in\nFile \"/tmp/pip-install-m_nm8mya/staticx/setup.py\", line 4, in\nfrom wheel.bdist_wheel import bdist_wheel\nModuleNotFoundError: No module named 'wheel'\n\nCommand \"python setup.py egg_info\" failed with error code 1 in /tmp/pip-install-m_nm8mya/staticx/\n...\n

    This error exists in the current version of the rocker tool, and relates to the os_detection function of rocker.

    To fix this error, temporary modification of rocker source code is required, which is not recommended.

    At current stage, it is recommended to run docker without NVIDIA gpu enabled for arm64 devices:

    rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n

    This tutorial will be updated after official fix from rocker.

    "},{"location":"installation/autoware/docker-installation/#tips","title":"Tips","text":""},{"location":"installation/autoware/docker-installation/#non-native-arm64-system","title":"Non-native arm64 System","text":"

    This section describes a process to run arm64 systems on amd64 systems using qemu-user-static.

    Initially, your system is usually incompatible with arm64 systems. To check that:

    $ docker run --rm -t arm64v8/ubuntu uname -m\nWARNING: The requested image's platform (linux/arm64/v8) does not match the detected host platform (linux/amd64) and no specific platform was requested\nstandard_init_linux.go:228: exec user process caused: exec format error\n

    Installing qemu-user-static enables us to run arm64 images on amd64 systems.

    $ sudo apt-get install qemu-user-static\n$ docker run --rm --privileged multiarch/qemu-user-static --reset -p yes\n$ docker run --rm -t arm64v8/ubuntu uname -m\nWARNING: The requested image's platform (linux/arm64/v8) does not match the detected host platform (linux/amd64) and no specific platform was requested\naarch64\n

    To run Autoware's Docker images of arm64 architecture, add the suffix -arm64.

    $ docker run --rm -it ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda-arm64\nWARNING: The requested image's platform (linux/arm64) does not match the detected host platform (linux/amd64) and no specific platform was requested\nroot@5b71391ad50f:/autoware#\n
    "},{"location":"installation/autoware/source-installation/","title":"Source installation","text":""},{"location":"installation/autoware/source-installation/#source-installation","title":"Source installation","text":""},{"location":"installation/autoware/source-installation/#prerequisites","title":"Prerequisites","text":"
    sudo apt-get -y update\nsudo apt-get -y install git\n

    Note: If you wish to use ROS 2 Galactic on Ubuntu 20.04, refer to installation instruction from galactic branch, but be aware that Galactic version of Autoware might not have latest features.

    "},{"location":"installation/autoware/source-installation/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Clone autowarefoundation/autoware and move to the directory.

      git clone https://github.com/autowarefoundation/autoware.git\ncd autoware\n
    2. If you are installing Autoware for the first time, you can automatically install the dependencies by using the provided Ansible script.

      ./setup-dev-env.sh\n

      If you encounter any build issues, please consult the Troubleshooting section for assistance.

    Info

    Before installing NVIDIA libraries, please ensure that you have reviewed and agreed to the licenses.

    Note

    The following items will be automatically installed. If the ansible script doesn't work or if you already have different versions of dependent libraries installed, please install the following items manually.

    If you didn't use ansible script you will need to download some package artifacts as explained in Manual loading of artifacts. Otherwise some packages (mostly from perception) will not be able to run as they need these artifacts for the inference.

    "},{"location":"installation/autoware/source-installation/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"
    1. Create the src directory and clone repositories into it.

      Autoware uses vcstool to construct workspaces.

      cd autoware\nmkdir src\nvcs import src < autoware.repos\n
    2. Install dependent ROS packages.

      Autoware requires some ROS 2 packages in addition to the core components. The tool rosdep allows an automatic search and installation of such dependencies. You might need to run rosdep update before rosdep install.

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    3. Build the workspace.

      Autoware uses colcon to build workspaces. For more advanced options, refer to the documentation.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

      If there is any build issue, refer to Troubleshooting.

    "},{"location":"installation/autoware/source-installation/#how-to-update-a-workspace","title":"How to update a workspace","text":"
    1. Update the .repos file.

      cd autoware\ngit pull <remote> <your branch>\n

      <remote> is usually git@github.com:autowarefoundation/autoware.git

    2. Update the repositories.

      vcs import src < autoware.repos\nvcs pull src\n

      For Git users:

      • vcs import is similar to git checkout.
        • Note that it doesn't pull from the remote.
      • vcs pull is similar to git pull.
        • Note that it doesn't switch branches.

      For more information, refer to the official documentation.

    3. Install dependent ROS packages.

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    4. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"installation/related-tools/","title":"Installation of related tools","text":""},{"location":"installation/related-tools/#installation-of-related-tools","title":"Installation of related tools","text":"

    Warning

    Under Construction

    "},{"location":"models/","title":"Machine learning models","text":""},{"location":"models/#machine-learning-models","title":"Machine learning models","text":"

    The Autoware perception stack uses models for inference. These models are automatically downloaded if using ansible, but they can also be downloaded manually.

    "},{"location":"models/#onnx-model-files","title":"ONNX model files","text":""},{"location":"models/#download-instructions","title":"Download instructions","text":"

    The ONNX model files are stored in a common location, hosted by Web.Auto

    Any tool that can download files from the web (e.g. wget or curl) is the only requirement for downloading these files:

    # yabloc_pose_initializer\n\n$ mkdir -p ~/autoware_data/yabloc_pose_initializer/\n$ wget -P ~/autoware_data/yabloc_pose_initializer/ \\\nhttps://s3.ap-northeast-2.wasabisys.com/pinto-model-zoo/136_road-segmentation-adas-0001/resources.tar.gz\n\n\n# image_projection_based_fusion\n\n$ mkdir -p ~/autoware_data/image_projection_based_fusion/\n$ wget -P ~/autoware_data/image_projection_based_fusion/ \\\nhttps://awf.ml.dev.web.auto/perception/models/pointpainting/v4/pts_voxel_encoder_pointpainting.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/pointpainting/v4/pts_backbone_neck_head_pointpainting.onnx\n\n\n# lidar_apollo_instance_segmentation\n\n$ mkdir -p ~/autoware_data/lidar_apollo_instance_segmentation/\n$ wget -P ~/autoware_data/lidar_apollo_instance_segmentation/ \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/vlp-16.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/hdl-64.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/vls-128.onnx\n\n\n# lidar_centerpoint\n\n$ mkdir -p ~/autoware_data/lidar_centerpoint/\n$ wget -P ~/autoware_data/lidar_centerpoint/ \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint_tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint_tiny.onnx\n\n\n# tensorrt_yolo\n\n$ mkdir -p ~/autoware_data/tensorrt_yolo/\n$ wget -P ~/autoware_data/tensorrt_yolo/ \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov3.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov4-tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5s.onnx 
\\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5m.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5l.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5x.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/coco.names\n\n\n# tensorrt_yolox\n\n$ mkdir -p ~/autoware_data/tensorrt_yolox/\n$ wget -P ~/autoware_data/tensorrt_yolox/ \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-sPlus-opt.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-sPlus-opt.EntropyV2-calibration.table \\\nhttps://awf.ml.dev.web.auto/perception/models/object_detection_yolox_s/v1/yolox-sPlus-T4-960x960-pseudo-finetune.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/object_detection_yolox_s/v1/yolox-sPlus-T4-960x960-pseudo-finetune.EntropyV2-calibration.table \\\nhttps://awf.ml.dev.web.auto/perception/models/label.txt\n\n\n# traffic_light_classifier\n\n$ mkdir -p ~/autoware_data/traffic_light_classifier/\n$ wget -P ~/autoware_data/traffic_light_classifier/ \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/lamp_labels.txt\n\n\n# traffic_light_fine_detector\n\n$ mkdir -p 
~/autoware_data/traffic_light_fine_detector/\n$ wget -P ~/autoware_data/traffic_light_fine_detector/ \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_labels.txt\n\n\n# traffic_light_ssd_fine_detector\n\n$ mkdir -p ~/autoware_data/traffic_light_ssd_fine_detector/\n$ wget -P ~/autoware_data/traffic_light_ssd_fine_detector/ \\\nhttps://awf.ml.dev.web.auto/perception/models/mb2-ssd-lite-tlr.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/voc_labels_tl.txt\n
    "},{"location":"reference-hw/","title":"Reference HW Design","text":""},{"location":"reference-hw/#reference-hw-design","title":"Reference HW Design","text":"

    This document describes and gives additional information about the sensors and systems supported by the Autoware.Auto software.

    All equipment listed in this document has available ROS 2 drivers and has been tested by one or more of the community members in the field in autonomous vehicle and robotics applications.

    The listed sensors and systems are not sold, developed, or given direct technical support by the Autoware community. Having said that, any ROS 2 and Autoware related issue regarding the hardware usage can be asked about using the community guidelines, which can be found here.

    The document consists of the sections listed below:

    "},{"location":"reference-hw/ad-computers/","title":"AD Computers","text":""},{"location":"reference-hw/ad-computers/#ad-computers","title":"AD Computers","text":""},{"location":"reference-hw/ad-computers/#adlink-in-vehicle-computers","title":"ADLINK In-Vehicle Computers","text":"

    ADLINK solutions which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) AVA-351001 Intel\u00ae Xeon\u00ae E-2278GE Dual RTX 5000 64GB RAM,CAN, USB, 10G Ethernet, DIO, Hot-Swap SSD, USIM 9~36 VDC, MIL-STD-810H,ISO 7637-2 & SAE 113-11 Y SOAFEE\u2019s AVA Developer Platform Ampere Altra ARMv8 optional USB, Ethernet, DIO, M.2 NVMe SSDs 110/220 AC Y RQX-58G Carmel ARMv8.2 2.26GHz Nvidia Jetson AGX Xavier USB, Ethernet, M.2 NVME SSD, CAN, USIM, GMSL2 Camera support 9~36VDC Y RQX-59G 8-core Arm\u00ae Cortex\u00ae-A78AE v8.2 Nvidia Jetson AGX Orin USB, Ethernet, M.2 NVME SSD, CAN, USIM, GMSL2 Camera support 9~36VDC N SOAFEE\u2019s AVA AP1 Ampere Altra ARMv8 optional CAN, USB, Ethernet, DIO, M.2 NVMe SSDs 12 Volt Y

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#nxp-in-vehicle-computers","title":"NXP In-Vehicle Computers","text":"

    NXP solutions which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) BLUEBOX 3.0 16 x Arm\u00ae Cortex\u00ae-A72 Dual RTX 8000 or RTX A6000 16 GB RAM CAN, FlexRay, USB, Ethernet, DIO, SSD ASIL-D -

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#neousys-in-vehicle-computers","title":"Neousys In-Vehicle Computers","text":"

    Neousys solutions which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) 8208-GC Intel\u00ae Xeon\u00ae E-2278GE Dual RTX 2080ti or RTX 3070 128 GB RAM,CAN, USB, Ethernet, Serial, Hot-Swap SSD 8-35 VoltVibration:MIL-STD810G 5-500 Hz, 3 axes -

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#crystal-rugged-in-vehicle-computers","title":"Crystal Rugged In-Vehicle Computers","text":"

    Crystal Rugged solutions which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) AVC 0161-AC Intel\u00ae Xeon\u00ae Scalable Dual GPU RTX Series 2TB RAM,CAN, USB, Ethernet, Serial, Hot-Swap SSD 10-32 VoltVibration:2 G RMS 10-1000 Hz, 3 axes -

    Link to company website is here.

    "},{"location":"reference-hw/cameras/","title":"CAMERAs","text":""},{"location":"reference-hw/cameras/#cameras","title":"CAMERAs","text":""},{"location":"reference-hw/cameras/#tier-iv-automotive-hdr-cameras","title":"TIER IV Automotive HDR Cameras","text":"

    TIER IV's Automotive HDR cameras which have ROS 2 driver and tested by TIER IV are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) C1 2.5 30 GMSL2 / USB3 Y (120dB) Y Y IP69K Y Y C2 5.4 30 GMSL2 / USB3 Y (120dB) Y Y IP69K Y Y C3 (to be released in 2024) 8.3 30 GMSL2 / TBD Y (120dB) Y Y IP69K Y Y

    Link to ROS 2 driver: https://github.com/tier4/ros2_v4l2_camera

    Link to product support site: TIER IV Edge.Auto documentation

    Link to product web site: TIER IV Automotive Camera Solution

    "},{"location":"reference-hw/cameras/#flir-machine-vision-cameras","title":"FLIR Machine Vision Cameras","text":"

    FLIR Machine Vision cameras which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) Blackfly S 2.0 5.0 22 95 USB-GigE N/A N/A Y N/A Y - Grasshopper3 2.3 5.0 26 90 USB-GigE N/A N/A Y N/A Y -

    Link to ROS 2 driver: https://github.com/berndpfrommer/flir_spinnaker_ros2

    Link to company website: https://www.flir.eu/iis/machine-vision/

    "},{"location":"reference-hw/cameras/#lucid-vision-cameras","title":"Lucid Vision Cameras","text":"

    Lucid Vision cameras which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) TRITON 054S 5.4 22 GigE Y Y Y up to IP67 Y Y TRITON 032S 3.2 35.4 GigE N/A N/A Y up to IP67 Y Y

    Link to ROS 2 driver: https://gitlab.com/leo-drive/Drivers/arena_camera Link to company website: https://thinklucid.com/triton-gige-machine-vision/

    "},{"location":"reference-hw/cameras/#allied-vision-cameras","title":"Allied Vision Cameras","text":"

    Allied Vision cameras which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) Mako G319 3.2 37.6 GigE N/A N/A Y N/A Y -

    Link to ROS 2 driver: https://github.com/neil-rti/avt_vimba_camera

    Link to company website: https://www.alliedvision.com/en/products/camera-series/mako-g

    "},{"location":"reference-hw/full_drivers_list/","title":"Drivers List","text":""},{"location":"reference-hw/full_drivers_list/#drivers-list","title":"Drivers List","text":"

    The list of all drivers listed above for easy access as a table with additional information:

    Type Maker Driver links License Maintainer Lidar VelodyneHesai Link Apache 2 david.wong@tier4.jpabraham.monrroy@map4.jp Lidar Velodyne Link BSD jwhitley@autonomoustuff.com Lidar Robosense Link BSD zdxiao@robosense.cn Lidar Hesai Link Apache 2 wuxiaozhou@hesaitech.com Lidar Leishen Link - - Lidar Livox Link MIT dev@livoxtech.com Lidar Ouster Link Apache 2 stevenmacenski@gmail.comtom@boxrobotics.ai Radar smartmicro Link Apache 2 opensource@smartmicro.de Camera Flir Link Apache 2 bernd.pfrommer@gmail.com Camera Lucid Vision Link - kcolak@leodrive.ai Camera Allied Vision Link Apache 2 at@email.com GNSS NovAtel Link BSD preed@swri.org GNSS SBG Systems Link MIT support@sbg-systems.com GNSS PolyExplore Link - support@polyexplore.com"},{"location":"reference-hw/imu_ahrs_gnss_ins/","title":"IMU, AHRS & GNSS/INS","text":""},{"location":"reference-hw/imu_ahrs_gnss_ins/#imu-ahrs-gnssins","title":"IMU, AHRS & GNSS/INS","text":""},{"location":"reference-hw/imu_ahrs_gnss_ins/#novatel-gnssins-sensors","title":"NovAtel GNSS/INS Sensors","text":"

    NovAtel GNSS/INS sensors which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List INS Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) PwrPak7D-E2 200 Hz R (0.013\u00b0)P (0.013\u00b0)Y (0.070\u00b0) 20 HzL1 / L2 / L5 555 Channels Y - Span CPT7 200 Hz R (0.01\u00b0)\u00a0P (0.01\u00b0)\u00a0Y (0.03\u00b0) 20 Hz L1 / L2 / L5 555 Channels Y -

    Link to ROS 2 driver: https://github.com/swri-robotics/novatel_gps_driver/tree/dashing-devel

    Link to company website: https://hexagonpositioning.com/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#xsens-gnssins-imu-sensors","title":"XSens GNSS/INS & IMU Sensors","text":"

    XSens GNSS/INS sensors which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) MTi-680G 2 kHz R (0.2\u00b0)P (0.2\u00b0)Y (0.5\u00b0) 5 HzL1 / L2\u00a0184 Channels Y - MTi-300 AHRS 2 kHz R (0.2\u00b0)P (0.2\u00b0)Y (1\u00b0) Not Applicable Y -

    Link to ROS 2 driver: http://wiki.ros.org/xsens_mti_driver

    Link to company website: https://www.xsens.com/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#sbg-gnssins-imu-sensors","title":"SBG GNSS/INS & IMU Sensors","text":"

    SBG GNSS/INS sensors which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) Ellipse-D 200 Hz, 1 kHz (IMU) R (0.1\u00b0)P (0.1\u00b0)Y (0.05\u00b0) 5 HzL1 / L2184 Channels Y Y Ellipse-A (AHRS) 200 Hz, 1 kHz (IMU) R (0.1\u00b0)P (0.1\u00b0)Y (0.8\u00b0) Not Applicable Y -

    Link to ROS 2 driver: https://github.com/SBG-Systems/sbg_ros2

    Link to company website: https://www.sbg-systems.com/products/ellipse-series/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#applanix-gnssins-sensors","title":"Applanix GNSS/INS Sensors","text":"

    Applanix GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) POSLVX 200 Hz R (0.03\u00b0)P (0.03\u00b0)Y (0.09\u00b0) L1 / L2 / L5336 Channels Y Y POSLV220 200 Hz R (0.02\u00b0)P (0.02\u00b0)Y (0.05\u00b0) L1 / L2 / L5336 Channels Y Y

    Link to ROS 2 driver: http://wiki.ros.org/applanix_driver

    Link to company website: https://www.applanix.com/products/poslv.htm

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#polyexplore-gnssins-sensors","title":"PolyExplore GNSS/INS Sensors","text":"

    PolyExplore GNSS/INS sensors which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) POLYNAV 2000P 100 Hz R (0.01\u00b0)P (0.01\u00b0)Y (0.1\u00b0) L1 / L2240 Channels Y - POLYNAV 2000S 100 Hz R (0.015\u00b0)P (0.015\u00b0)Y (0.08\u00b0) L1 / L240 Channels Y -

    Link to ROS 2 driver: https://github.com/polyexplore/ROS2_Driver

    Link to company website: https://www.polyexplore.com/

    "},{"location":"reference-hw/lidars/","title":"LIDARs","text":""},{"location":"reference-hw/lidars/#lidars","title":"LIDARs","text":""},{"location":"reference-hw/lidars/#velodyne-3d-lidar-sensors","title":"Velodyne 3D LIDAR Sensors","text":"

    Velodyne Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Alpha Prime 245m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Ultra Puck 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Puck 100m (+15\u00b0)/(-15\u00b0), (360\u00b0) Y Y Puck Hi-res 100m (+10\u00b0)/(-10\u00b0), (360\u00b0) Y Y

    Link to ROS 2 drivers: https://github.com/tier4/nebula https://github.com/ros-drivers/velodyne/tree/ros2/velodyne_pointcloud https://gitlab.com/autowarefoundation/autoware.auto/AutowareAuto/-/tree/master/src/drivers/velodyne_nodes https://github.com/autowarefoundation/awf_velodyne/tree/tier4/universe

    Link to company website: https://velodynelidar.com/

    "},{"location":"reference-hw/lidars/#robosense-3d-lidar-sensors","title":"RoboSense 3D LIDAR Sensors","text":"

    RoboSense Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) RS-Ruby 250m (+15\u00b0)/(-25\u00b0), (360) Y - RS-Ruby-Lite 230m (+15\u00b0)/(-25\u00b0), (360) Y - RS-LiDAR-32 200m (+15\u00b0)/(-25\u00b0), (360) Y - RS-LiDAR-16 150m (+15\u00b0)/(-15), (360) Y -

    Link to ROS 2 driver: https://github.com/RoboSense-LiDAR/rslidar_sdk

    Link to company website: https://www.robosense.ai/

    "},{"location":"reference-hw/lidars/#hesai-3d-lidar-sensors","title":"HESAI 3D LIDAR Sensors","text":"

    Hesai Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Pandar 128 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y - Pandar 64 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Pandar 40P 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Pandar XT 120m (+15\u00b0)/(-16\u00b0), (360\u00b0) Y Y Pandar QT 20m (-52.1\u00b0/+52.1\u00b0)/(360\u00b0) Y Y

    Link to ROS 2 drivers: https://github.com/tier4/nebula https://github.com/HesaiTechnology/HesaiLidar_General_ROS

    Link to company website: https://www.hesaitech.com/en/

    "},{"location":"reference-hw/lidars/#leishen-3d-lidar-sensors","title":"Leishen 3D LIDAR Sensors","text":"

    Leishen Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) LS C16 150m (+15\u00b0/-15\u00b0), (360\u00b0) Y - LS C32\u00a0 150m (+15\u00b0/-15\u00b0), (360\u00b0) Y - CH 32 120m (+3.7\u00b0/-6.7\u00b0),(120\u00b0) Y - CH 128 20m (+14\u00b0/-17\u00b0)/(150\u00b0) Y -

    Link to ROS 2 driver: https://github.com/leishen-lidar

    Link to company website: http://www.lslidar.com/

    "},{"location":"reference-hw/lidars/#livox-3d-lidar-sensors","title":"Livox 3D LIDAR Sensors","text":"

    Livox Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Horizon 260m (81.7\u00b0), (25.1\u00b0) Y Y Mid-70 90m (70.4\u00b0), (77.2\u00b0) Y - Avia 190m (70.4\u00b0), Circular Y - HAP 150m (25\u00b0), (120\u00b0) - -

    Link to ROS 2 driver: https://github.com/Livox-SDK/livox_ros2_driver

    Link to company website: https://www.livoxtech.com/

    "},{"location":"reference-hw/lidars/#ouster-3d-lidar-sensors","title":"Ouster 3D LIDAR Sensors","text":"

    Ouster Lidars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) OS0 50m (90\u00b0), (360\u00b0) Y - OS1 120m (45\u00b0), (360\u00b0) Y - OS2 240m (22,5\u00b0), (360\u00b0) Y Y

    Link to ROS 2 driver: https://github.com/ros-drivers/ros2_ouster_drivers

    Link to company website: https://ouster.com/

    "},{"location":"reference-hw/radars/","title":"RADARs","text":""},{"location":"reference-hw/radars/#radars","title":"RADARs","text":""},{"location":"reference-hw/radars/#smartmicro-automotive-radars","title":"Smartmicro Automotive Radars","text":"

    Smartmicro Radars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) Type 153 (Triple Mode Short, Medium Long) S:0.2...19 m\u00a0M:0.4...55 m L:0.8...120 m Short: (130\u00b0), (15\u00b0) Medium: (130\u00b0), (15\u00b0)Long: (100\u00b0),(15\u00b0) Y Y Type 132 ,(Dual Mode ,Medium, Long) M:0.5...64 m\u00a0\u00a0L:1...175 m Medium: (100\u00b0), (15\u00b0) Long: (32\u00b0), (15\u00b0) Y Y

    Link to ROS 2 driver: https://github.com/smartmicro/smartmicro_ros2_radars

    Link to company website: https://www.smartmicro.com/automotive-radar

    "},{"location":"reference-hw/radars/#aptiv-automotive-radars","title":"Aptiv Automotive Radars","text":"

    Aptiv Radars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) Aptiv MMR (Dual Mode Short, Long) S: 1...40 m L: 3...160 m Short.: (90), (90\u00b0) Long: (90\u00b0), (90\u00b0) Y - Aptiv ESR 2.5 (Dual Mode (Medium, Long)) M: 1...60 m L: 1...175 m Med.: (90\u00b0), (4.4\u00b0) Long: (20\u00b0), (4.4\u00b0) Y -

    Link to company website: https://autonomoustuff.com/products

    "},{"location":"reference-hw/radars/#continental-engineering-radars","title":"Continental Engineering Radars","text":"

    Continental Engineering Radars which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) ARS430DI 250m (120), (18\u00b0) - -

    Link to company website: https://conti-engineering.com/components/ars430/

    "},{"location":"reference-hw/remote_drive/","title":"Remote Drive","text":""},{"location":"reference-hw/remote_drive/#remote-drive","title":"Remote Drive","text":""},{"location":"reference-hw/remote_drive/#fort-robotics","title":"FORT ROBOTICS","text":"

    Fort Robotics remote control & E-stop devices which are used for autonomous driving and tested by one or more community members are listed below:

    Supported Products Op.Frequency Controller ROS 2 Support Autoware Tested (Y/N) Vehicle Safety Controller with E-stop 900 Mhz radio: up to 2km LOS2.4Ghz radio: up to 500m LOS IP 66 EnclosureBuilt-in emergency stop safety control(2) 2-axis joysticks(2) 1-axis finger sticks(8) buttons - -

    Link to company website: https://fortrobotics.com/vehicle-safety-controller/

    "},{"location":"reference-hw/remote_drive/#logitech","title":"LOGITECH","text":"

    Logitech joysticks which are used for autonomous driving and tested by one or more community members are listed below:

    Supported Products Op.Frequency Controller ROS 2 Support Autoware Tested (Y/N) Logitech F-710 2.4 GHz Wireless, 10m range (2) 2-axis joysticks (18) buttons Y Y

    Link to ROS driver: http://wiki.ros.org/joy

    Link to company website: https://www.logitechg.com/en-us/products/gamepads/f710-wireless-gamepad.html

    "},{"location":"reference-hw/thermal_cameras/","title":"Thermal CAMERAs","text":""},{"location":"reference-hw/thermal_cameras/#thermal-cameras","title":"Thermal CAMERAs","text":""},{"location":"reference-hw/thermal_cameras/#flir-thermal-automotive-dev-kit","title":"FLIR Thermal Automotive Dev. Kit","text":"

    FLIR ADK Thermal Vision cameras which has ROS 2 driver and tested by one or more community members are listed below:

    Supported Products List MP FPS Interface Spectral Band FOV ROS 2 Driver Autoware Tested (Y/N) FLIR ADK 640x512 30 USB-GMSL,Ethernet 8-14 um (LWIR) 75\u02da, 50\u02da, 32\u02da, and 24\u02da - -"},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/","title":"Vehicle Drive By Wire Suppliers","text":""},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#vehicle-drive-by-wire-suppliers","title":"Vehicle Drive By Wire Suppliers","text":""},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#new-eagle-dbw-solutions","title":"New Eagle DBW Solutions","text":"

    New Eagle DBW Controllers which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Jeep CherokeeChrysler PacificaToyota PriusChevy BoltFord TransitRAM 1500Custom\u00a0 500W Sine Inverter2000 Watts8 Channel PDS Optional, Available Y Y

    Link to company website: https://neweagle.net/autonomous-machines/

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#dataspeed-dbw-solutions","title":"Dataspeed DBW Solutions","text":"

    Dataspeed DBW Controllers which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Lincoln MKZ, NautilusFord Fusion, F150, Transit Connect, RangerChrysler PacificaJeep CherokeePolaris GEM, RZR 12 Channel PDS,15 A Each at 12 V Optional, Available Y -

    Link to company website: https://www.dataspeedinc.com/

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#astuff-pacmod-dbw-solutions","title":"AStuff Pacmod DBW Solutions","text":"

    Autonomous Stuff Pacmod DBW Controllers which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Polaris GEM SeriesPolaris eLXD MY 2016+Polaris Ranger X900International ProStarLexus RX-450h MYFord RangerToyota Minivan Power distribution panel Optional, Available Y Y

    Link to company website: https://autonomoustuff.com/platform/pacmod

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#schaeffler-paravan-space-drive-dbw-solutions","title":"Schaeffler-Paravan Space Drive DBW Solutions","text":"

    Schaeffler-Paravan Space Drive DBW Controllers which is used for autonomous driving and tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Custom Integration with Actuators - Optional, Available Y Y

    Link to company website: https://www.schaeffler-paravan.de/en/products/space-drive-system/

    "},{"location":"reference-hw/vehicle_platform_suppliers/","title":"Vehicle Platform Suppliers","text":""},{"location":"reference-hw/vehicle_platform_suppliers/#vehicle-platform-suppliers","title":"Vehicle Platform Suppliers","text":""},{"location":"reference-hw/vehicle_platform_suppliers/#pix-moving-autonomous-vehicle-solutions","title":"PIX MOVING Autonomous Vehicle Solutions","text":"

    PIX Moving AV solutions which is used for autonomous development and tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Electric DBW Chassis and Platforms Y Y Y -

    Link to company website: https://www.pixmoving.com/pixkit

    Different sizes of platforms

    "},{"location":"reference-hw/vehicle_platform_suppliers/#autonomoustuff-av-solutions","title":"Autonomoustuff AV Solutions","text":"

    Autonomoustuff platform solutions which is used for autonomous development and tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Road Vehicles, Golf Carts & Trucks Y Y Y -

    Link to company website: https://autonomoustuff.com/platform

    "},{"location":"reference-hw/vehicle_platform_suppliers/#navya-av-solutions","title":"NAVYA AV Solutions","text":"

    NAVYA platform solutions which is used for autonomous development and tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Shuttle Bus, Taxi and Tow Tractors Y Y - -

    Link to company website: https://navya.tech/en

    "},{"location":"reference-hw/vehicle_platform_suppliers/#zing-robotics-av-solutions","title":"ZING ROBOTICS AV Solutions","text":"

    ZING Robotics platform solutions which is used for autonomous development and tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Purpose built electric autonomous vehicles for aviation, military etc. Y Y - -

    Link to company website: https://www.zingrobotics.com/

    "},{"location":"support/","title":"Support","text":""},{"location":"support/#support","title":"Support","text":"

    This page explains several support resources.

    "},{"location":"support/docs-guide/","title":"Docs guide","text":""},{"location":"support/docs-guide/#docs-guide","title":"Docs guide","text":"

    This page explains several documentation sites that are useful for Autoware and ROS development.

    "},{"location":"support/support-guidelines/","title":"Support guidelines","text":""},{"location":"support/support-guidelines/#support-guidelines","title":"Support guidelines","text":"

    This page explains the support mechanisms we provide.

    Warning

    Before asking for help, search and read this documentation site carefully. Also, follow the discussion guidelines for discussions.

    Choose appropriate resources depending on what kind of help you need and read the detailed description in the sections below.

    "},{"location":"support/support-guidelines/#documentation-sites","title":"Documentation sites","text":"

    Docs guide shows the list of useful documentation sites. Visit them and see if there is any information related to your problem.

    Note that the documentation sites aren't always up-to-date and perfect. If you find out that some information is wrong, unclear, or missing in Autoware docs, feel free to submit a pull request following the contribution guidelines.

    Warning

    Since this documentation site is still under construction, there are some empty pages.

    "},{"location":"support/support-guidelines/#github-discussions","title":"GitHub Discussions","text":"

    If you encounter a problem with Autoware, check existing issues and questions and search for similar issues first.

    If no answer was found, create a new question thread here. If your question is not answered within a week, then @mention the maintainers to remind them.

    Also, there are other discussion types such as feature requests or design discussions. Feel free to open or join such discussions.

    If you don't know how to create a discussion, refer to GitHub Docs.

    "},{"location":"support/support-guidelines/#github-issues","title":"GitHub Issues","text":"

    If you have a problem and you have confirmed it is a bug, find the appropriate repository and create a new issue there. If you can't determine the appropriate repository, ask the maintainers for help by creating a new discussion in the Q&A category.

    Warning

    Do not create issues for questions or unconfirmed bugs. If such issues are created, maintainers will transfer them to GitHub Discussions.

    If you want to fix the bug by yourself, discuss the approach with maintainers and submit a pull request.

    "},{"location":"support/support-guidelines/#discord","title":"Discord","text":"

    Autoware has a Discord server for casual communication between contributors.

    The Autoware Discord server is a good place for the following activities:

    Note that it is not the right place to get help for your issues.

    "},{"location":"support/support-guidelines/#ros-discourse","title":"ROS Discourse","text":"

    If you want to widely discuss a topic with the general Autoware and ROS community or ask a question not related to Autoware's bugs, post to the Autoware category on ROS Discourse.

    Warning

    Do not post questions about bugs to ROS Discourse!

    "},{"location":"support/troubleshooting/","title":"Troubleshooting","text":""},{"location":"support/troubleshooting/#troubleshooting","title":"Troubleshooting","text":""},{"location":"support/troubleshooting/#setup-issues","title":"Setup issues","text":""},{"location":"support/troubleshooting/#cuda-related-errors","title":"CUDA-related errors","text":"

    When installing CUDA, errors may occur because of version conflicts. To resolve these types of errors, try one of the following methods:

    Warning

    Note that this may break your system, so proceed carefully.

    Warning

    Note that some components in Autoware Universe require CUDA, and only the CUDA version in the env file is supported at this time. Autoware may work with other CUDA versions, but those versions are not supported and functionality is not guaranteed.

    "},{"location":"support/troubleshooting/#build-issues","title":"Build issues","text":""},{"location":"support/troubleshooting/#insufficient-memory","title":"Insufficient memory","text":"

    Building Autoware requires a lot of memory, and your machine can freeze or crash if memory runs out during a build. To avoid this problem, 16-32GB of swap should be configured.

    # Optional: Check the current swapfile\nfree -h\n\n# Remove the current swapfile\nsudo swapoff /swapfile\nsudo rm /swapfile\n\n# Create a new swapfile\nsudo fallocate -l 32G /swapfile\nsudo chmod 600 /swapfile\nsudo mkswap /swapfile\nsudo swapon /swapfile\n\n# Optional: Check if the change is reflected\nfree -h\n

    For more detailed configuration steps, along with an explanation of swap, refer to Digital Ocean's \"How To Add Swap Space on Ubuntu 20.04\" tutorial

    If there are too many CPU cores (more than 64) in your machine, it might require more memory. A workaround here is to limit the job number while building.

    MAKEFLAGS=\"-j4\" colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

    You can adjust -j4 to any number based on your system. For more details, see the manual page of GNU make.

    By reducing the number of packages built in parallel, you can also reduce the amount of memory used. In the following example, the number of packages built in parallel is set to 1, and the number of jobs used by make is limited to 1.

    MAKEFLAGS=\"-j1\" colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release --parallel-workers 1\n

    Note

    By lowering both the number of packages built in parallel and the number of jobs used by make, you can reduce the memory usage. However, this also means that the build process takes longer.

    "},{"location":"support/troubleshooting/#errors-when-using-the-latest-version-of-autoware","title":"Errors when using the latest version of Autoware","text":"

    If you are working with the latest version of Autoware, issues can occur due to out-of-date software or old build files.

    To resolve these types of problems, first try cleaning your build artifacts and rebuilding:

    rm -rf build/ install/ log/\ncolcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

    If the error is not resolved, remove src/ and update your workspace according to installation type (Docker / source).

    Warning

    Before removing src/, confirm that there are no modifications in your local environment that you want to keep!

    If errors still persist after trying the steps above, delete the entire workspace, clone the repository once again and restart the installation process.

    rm -rf autoware/\ngit clone https://github.com/autowarefoundation/autoware.git\n
    "},{"location":"support/troubleshooting/#errors-when-using-a-fixed-version-of-autoware","title":"Errors when using a fixed version of Autoware","text":"

    In principle, errors should not occur when using a fixed version. That said, possible causes include:

    In addition to the causes listed above, there are two common misunderstandings around the use of fixed versions.

    1. You used a fixed version for autowarefoundation/autoware only. All of the repository versions in the .repos file must be specified in order to use a completely fixed version.

    2. You didn't update the workspace after changing the branch of autowarefoundation/autoware. Changing the branch of autowarefoundation/autoware does not affect the files under src/. You have to run the vcs import command to update them.

    "},{"location":"support/troubleshooting/#error-when-building-python-package","title":"Error when building python package","text":"

    During the build, the following issue can occur

    pkg_resources.extern.packaging.version.InvalidVersion: Invalid version: '0.23ubuntu1'\n

    The error is due to the fact that for versions between 66.0.0 and 67.5.0 setuptools enforces the python packages to be PEP-440 conformant. Since version 67.5.1 setuptools has a fallback that makes it possible to work with old packages again.

    The solution is to update setuptools to the newest version with the following command

    pip install --upgrade setuptools\n
    "},{"location":"support/troubleshooting/#dockerrocker-issues","title":"Docker/rocker issues","text":"

    If any errors occur when running Autoware with Docker or rocker, first confirm that your Docker installation is working correctly by running the following commands:

    docker run --rm -it hello-world\ndocker run --rm -it ubuntu:latest\n

    Next, confirm that you are able to access the base Autoware image that is stored on the GitHub Packages website

    docker run --rm -it ghcr.io/autowarefoundation/autoware-universe:latest\n
    "},{"location":"support/troubleshooting/#runtime-issues","title":"Runtime issues","text":""},{"location":"support/troubleshooting/#performance-related-issues","title":"Performance related issues","text":"

    Symptoms:

    If you have any of these symptoms, please see the Performance Troubleshooting page.

    "},{"location":"support/troubleshooting/#map-does-not-display-when-running-the-planning-simulator","title":"Map does not display when running the Planning Simulator","text":"

    When running the Planning Simulator, the most common reason for the map not being displayed in RViz is because the map path has not been specified correctly in the launch command. You can confirm if this is the case by searching for Could not find lanelet map under {path-to-map-dir}/lanelet2_map.osm errors in the log.

    Another possible reason is that map loading is taking a long time due to poor DDS performance. For this, please visit the Performance Troubleshooting page.

    "},{"location":"support/troubleshooting/performance-troubleshooting/","title":"Performance Troubleshooting","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#performance-troubleshooting","title":"Performance Troubleshooting","text":"

    Overall symptoms:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#diagnostic-steps","title":"Diagnostic Steps","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-multicast-is-enabled","title":"Check if multicast is enabled","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis","title":"Diagnosis","text":"

    Make sure that the multicast is enabled for your interface.

    For example when you run following:

    source /opt/ros/humble/setup.bash\nros2 run demo_nodes_cpp talker\n

    If you get the error message selected interface \"{your-interface-name}\" is not multicast-capable: disabling multicast, this should be fixed.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution","title":"Solution","text":"

    Run the following command to allow multicast:

    sudo ip link set multicast on {your-interface-name}\n

    This way DDS will function as intended and multiple subscribers can receive data from a single publisher without any significant degradation in performance.

    This is a temporary solution and will be reverted once the computer restarts.

    To make it permanent either,

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-compilation-flags","title":"Check the compilation flags","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_1","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_1","title":"Diagnosis","text":"

    Check the ~/.bash_history file to see if there are any colcon build directives without -DCMAKE_BUILD_TYPE=Release or -DCMAKE_BUILD_TYPE=RelWithDebInfo flags at all.

    Even if a build starts with these flags, if the same workspace is later compiled without them, it will still be a slow build in the end.

    In addition, the nodes will run slow in general, especially the pointcloud_preprocessor nodes.

    Example issue: issue2597

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_1","title":"Solution","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-dds-settings","title":"Check the DDS settings","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_2","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-rmw-ros-middleware-implementation","title":"Check the RMW (ROS Middleware) implementation","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_2","title":"Diagnosis","text":"

    Run following to check the middleware used:

    echo $RMW_IMPLEMENTATION\n

    The return line should be rmw_cyclonedds_cpp. If not, apply the solution.

    If you are using a different DDS middleware, we might not have official support for it just yet.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_2","title":"Solution","text":"

    Add export RMW_IMPLEMENTATION=rmw_cyclonedds_cpp as a separate line in your ~/.bashrc file.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-the-cyclonedds-is-configured-correctly","title":"Check if the CycloneDDS is configured correctly","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_3","title":"Diagnosis","text":"

    Run following to check the configuration .xml file of the CycloneDDS:

    echo $CYCLONEDDS_URI\n

    The return line should be a valid path pointing to an .xml file with CycloneDDS configuration.

    Also check if the file is configured correctly:

    cat !{echo $CYCLONEDDS_URI}\n

    This should print the .xml file on the terminal.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_3","title":"Solution","text":"

    Follow DDS settings:Tuning DDS documentation and make sure:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-linux-kernel-maximum-buffer-size","title":"Check the Linux kernel maximum buffer size","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_4","title":"Diagnosis","text":"

    More info on these values: Cross-vendor tuning

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_4","title":"Solution","text":"

    Either:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-ros-localhost-only-communication-is-enabled","title":"Check if ROS localhost only communication is enabled","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_3","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_5","title":"Diagnosis","text":"

    Run following to check it:

    echo $ROS_LOCALHOST_ONLY\n

    The return line should be 1. If not, apply the solution.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_5","title":"Solution","text":""},{"location":"tutorials/","title":"Simulation tutorials","text":""},{"location":"tutorials/#simulation-tutorials","title":"Simulation tutorials","text":"

    Simulations provide a way of verifying Autoware's functionality before field testing with an actual vehicle. There are three main types of simulation that can be run ad hoc or via a scenario runner.

    "},{"location":"tutorials/#simulation-methods","title":"Simulation methods","text":""},{"location":"tutorials/#ad-hoc-simulation","title":"Ad hoc simulation","text":"

    Ad hoc simulation is a flexible method for running basic simulations on your local machine, and is the recommended method for anyone new to Autoware.

    "},{"location":"tutorials/#scenario-simulation","title":"Scenario simulation","text":"

    Scenario simulation uses a scenario runner to run more complex simulations based on predefined scenarios. It is often run automatically for continuous integration purposes, but can also be run on a local machine.

    "},{"location":"tutorials/#simulation-types","title":"Simulation types","text":""},{"location":"tutorials/#planning-simulation","title":"Planning simulation","text":"

    Planning simulation uses simple dummy data to test the Planning and Control components - specifically path generation, path following and obstacle avoidance. It verifies that a vehicle can reach a goal destination while avoiding pedestrians and surrounding cars, and is another method for verifying the validity of Lanelet2 maps. It also allows for testing of traffic light handling.

    "},{"location":"tutorials/#how-does-planning-simulation-work","title":"How does planning simulation work?","text":"
    1. Generate a path to the goal destination
    2. Control the car along the generated path
    3. Detect and avoid any humans or other vehicles on the way to the goal destination
    "},{"location":"tutorials/#rosbag-replay-simulation","title":"Rosbag replay simulation","text":"

    Rosbag replay simulation uses prerecorded rosbag data to test the following aspects of the Localization and Perception components:

    By repeatedly playing back the data, this simulation type can also be used for endurance testing.

    "},{"location":"tutorials/#digital-twin-simulation","title":"Digital twin simulation","text":"

    Digital twin simulation is a simulation type that is able to produce realistic data and simulate almost the entire system. It is also commonly referred to as end-to-end simulation.

    "},{"location":"tutorials/ad-hoc-simulation/","title":"Ad hoc simulation","text":""},{"location":"tutorials/ad-hoc-simulation/#ad-hoc-simulation","title":"Ad hoc simulation","text":"

    Warning

    Under Construction

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/","title":"Planning simulation","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#planning-simulation","title":"Planning simulation","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#preparation","title":"Preparation","text":"

    Download and unpack a sample map.

    gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1499_nsbUbIeturZaDj7jhUownh5fvXHd'\nunzip -d ~/autoware_map ~/autoware_map/sample-map-planning.zip\n

    Note

    Sample map: Copyright 2020 TIER IV, Inc.

    Check if you have ~/autoware_data folder and files in it.

    $ cd ~/autoware_data\n$ ls -C -w 30\nimage_projection_based_fusion\nlidar_apollo_instance_segmentation\nlidar_centerpoint\ntensorrt_yolo\ntensorrt_yolox\ntraffic_light_classifier\ntraffic_light_fine_detector\ntraffic_light_ssd_fine_detector\nyabloc_pose_initializer\n

    If not, please, follow Manual downloading of artifacts.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#basic-simulations","title":"Basic simulations","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#lane-driving-scenario","title":"Lane driving scenario","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#1-launch-autoware","title":"1. Launch Autoware","text":"
    source ~/autoware/install/setup.bash\nros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-planning vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

    Warning

    Note that you cannot use ~ instead of $HOME here.

    If ~ is used, the map will fail to load.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#2-set-an-initial-pose-for-the-ego-vehicle","title":"2. Set an initial pose for the ego vehicle","text":"

    a) Click the 2D Pose estimate button in the toolbar, or hit the P key.

    b) In the 3D View pane, click and hold the left-mouse button, and then drag to set the direction for the initial pose. An image representing the vehicle should now be displayed.

    Warning

    Remember to set the initial pose of the car in the same direction as the lane.

    To confirm the direction of the lane, check the arrowheads displayed on the map.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#3-set-a-goal-pose-for-the-ego-vehicle","title":"3. Set a goal pose for the ego vehicle","text":"

    a) Click the 2D Goal Pose button in the toolbar, or hit the G key.

    b) In the 3D View pane, click and hold the left-mouse button, and then drag to set the direction for the goal pose. If done correctly, you will see a planned path from initial pose to goal pose.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#4-start-the-ego-vehicle","title":"4. Start the ego vehicle","text":"

    Now you can start the ego vehicle driving by clicking the AUTO button on OperationMode in AutowareStatePanel. Alternatively, you can manually start the vehicle by running the following command:

    source ~/autoware/install/setup.bash\nros2 service call /api/operation_mode/change_to_autonomous autoware_adapi_v1_msgs/srv/ChangeOperationMode {}\n

    After that, you can see AUTONOMOUS sign on OperationMode and AUTO button is grayed out.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#parking-scenario","title":"Parking scenario","text":"
    1. Set an initial pose and a goal pose, and engage the ego vehicle.

    2. When the vehicle approaches the goal, it will switch from lane driving mode to parking mode.

    3. After that, the vehicle will reverse into the destination parking spot.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#lane-change-scenario","title":"Lane change scenario","text":"
    1. Download and unpack Nishishinjuku map.

      gdown -O ~/autoware_map/ 'https://github.com/tier4/AWSIM/releases/download/v1.1.0/nishishinjuku_autoware_map.zip'\nunzip -d ~/autoware_map ~/autoware_map/nishishinjuku_autoware_map.zip\n
    2. Launch autoware with Nishishinjuku map with following command:

      source ~/autoware/install/setup.bash\nros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/nishishinjuku_autoware_map vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

    3. Set an initial pose and a goal pose in adjacent lanes.

    4. Engage the ego vehicle. It will make a lane change along the planned path.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#avoidance-scenario","title":"Avoidance scenario","text":"
    1. Set an initial pose and a goal pose in the same lane. A path will be planned.

    2. Set a \"2D Dummy Bus\" on the roadside. A new path will be planned.

    3. Engage the ego vehicle. It will avoid the obstacle along the newly planned path.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#advanced-simulations","title":"Advanced Simulations","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#placing-dummy-objects","title":"Placing dummy objects","text":"
    1. Click the 2D Dummy Car or 2D Dummy Pedestrian button in the toolbar.
    2. Set the pose of the dummy object by clicking and dragging on the map.
    3. Set the velocity of the object in Tool Properties -> 2D Dummy Car/Pedestrian panel.

      !!! note

      Changes to the velocity parameter will only affect objects placed after the parameter is changed.

    4. Delete any dummy objects placed in the view by clicking the Delete All Objects button in the toolbar.

    5. Click the Interactive button in the toolbar to make the dummy object interactive.

    6. To add an interactive dummy object, press SHIFT and right-click.

    7. To delete an interactive dummy object, press ALT and right-click.
    8. To move an interactive dummy object, hold down the right mouse button and drag and drop the object.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#traffic-light-recognition-simulation","title":"Traffic light recognition simulation","text":"

    By default, traffic lights on the map are all treated as if they are set to green. As a result, when a path is created that passes through an intersection with a traffic light, the ego vehicle will drive through the intersection without stopping.

    The following steps explain how to set and reset traffic lights in order to test how the Planning component will respond.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#set-traffic-light","title":"Set traffic light","text":"
    1. Go to Panels -> Add new panel, select TrafficLightPublishPanel, and then press OK.

    2. In TrafficLightPublishPanel, set the ID and color of the traffic light.

    3. Click the SET button.

    4. Finally, click the PUBLISH button to send the traffic light status to the simulator. Any planned path that goes past the selected traffic light will then change accordingly.

    By default, Rviz should display the ID of each traffic light on the map. You can have a closer look at the IDs by zooming in the region or by changing the View type.

    In case the IDs are not displayed, try the following troubleshooting steps:

    a) In the Displays panel, find the traffic_light_id topic by toggling the triangle icons next to Map > Lanelet2VectorMap > Namespaces.

    b) Check the traffic_light_id checkbox.

    c) Reload the topic by clicking the Map checkbox twice.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#updatereset-traffic-light","title":"Update/Reset traffic light","text":"

    You can update the color of the traffic light by selecting the next color (in the image it is GREEN) and clicking SET button. In the image the traffic light in front of the ego vehicle changed from RED to GREEN and the vehicle restarted.

    To remove a traffic light from TrafficLightPublishPanel, click the RESET button.

    Reference video tutorials

    "},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/","title":"Rosbag replay simulation","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#rosbag-replay-simulation","title":"Rosbag replay simulation","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#steps","title":"Steps","text":"
    1. Download and unpack a sample map.

      • You can also download the map manually.
      gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1A-8BvYRX3DhSzkAnOcGWFw5T30xTlwZI'\nunzip -d ~/autoware_map/ ~/autoware_map/sample-map-rosbag.zip\n
    2. Download the sample rosbag files.

      • You can also download the rosbag files manually.
      gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1VnwJx9tI3kI_cTLzP61ktuAJ1ChgygpG'\nunzip -d ~/autoware_map/ ~/autoware_map/sample-rosbag.zip\n
    3. Check if you have ~/autoware_data folder and files in it.

      $ cd ~/autoware_data\n$ ls -C -w 30\nimage_projection_based_fusion\nlidar_apollo_instance_segmentation\nlidar_centerpoint\ntensorrt_yolo\ntensorrt_yolox\ntraffic_light_classifier\ntraffic_light_fine_detector\ntraffic_light_ssd_fine_detector\nyabloc_pose_initializer\n

      If not, please, follow Manual downloading of artifacts.

    "},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#note","title":"Note","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#how-to-run-a-rosbag-replay-simulation","title":"How to run a rosbag replay simulation","text":"
    1. Launch Autoware.

      source ~/autoware/install/setup.bash\nros2 launch autoware_launch logging_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-rosbag vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

      Note that you cannot use ~ instead of $HOME here.

    2. Play the sample rosbag file.

      source ~/autoware/install/setup.bash\nros2 bag play ~/autoware_map/sample-rosbag/sample.db3 -r 0.2 -s sqlite3\n

    3. To focus the view on the ego vehicle, change the Target Frame in the RViz Views panel from viewer to base_link.

    4. To switch the view to Third Person Follower etc, change the Type in the RViz Views panel.

    Reference video tutorials

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/","title":"MORAI Sim: Drive","text":""},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#morai-sim-drive","title":"MORAI Sim: Drive","text":"

    Note

    Any kind of for-profit activity with the trial version of the MORAI SIM:Drive is strictly prohibited.

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#hardware-requirements","title":"Hardware requirements","text":"Minimum PC Specs OS Windows 10, Ubuntu 20.04, Ubuntu 18.04, Ubuntu 16.04 CPU Intel i5-9600KF or AMD Ryzen 5 3500X RAM DDR4 16GB GPU RTX2060 Super Required PC Specs OS Windows 10, Ubuntu 20.04, Ubuntu 18.04, Ubuntu 16.04 CPU Intel i9-9900K or AMD Ryzen 7 3700X (or higher) RAM DDR4 64GB (or higher) GPU RTX2080Ti or higher"},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#application-and-download","title":"Application and Download","text":"

    Only for AWF developers, trial license for 3 months can be issued. Download the application form and send to Hyeongseok Jeon

    After the trial license is issued, you can login to MORAI Sim:Drive via Launchers (Windows/Ubuntu)

    CAUTION: Do not use the Launchers in the following manual

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#technical-documents","title":"Technical Documents","text":"

    As of Oct. 2022, our simulation version is ver.22.R3, but the English manual is under construction.

    Be aware that the following manuals are for ver.22.R2

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#technical-support","title":"Technical Support","text":"

    Hyeongseok Jeon will give full technical support

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/","title":"AWSIM simulator","text":""},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/#awsim-simulator","title":"AWSIM simulator","text":"

    AWSIM is a simulator for Autoware development and testing. To get started, please follow the official instruction provided by TIER IV.

    "},{"location":"tutorials/scenario-simulation/","title":"Scenario simulation","text":""},{"location":"tutorials/scenario-simulation/#scenario-simulation","title":"Scenario simulation","text":"

    Warning

    Under Construction

    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/","title":"Installation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#installation","title":"Installation","text":"

    This document contains step-by-step instruction on how to build AWF Autoware Core/Universe with scenario_simulator_v2.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#prerequisites","title":"Prerequisites","text":"
    1. Autoware has been built and installed
    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#how-to-build","title":"How to build","text":"
    1. Navigate to the Autoware workspace:

      cd autoware\n
    2. Import Simulator dependencies:

      vcs import src < simulator.repos\n
    3. Install dependent ROS packages:

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    4. Build the workspace:

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/","title":"Random test simulation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/#random-test-simulation","title":"Random test simulation","text":"

    Note

    Running the Scenario Simulator requires some additional steps on top of building and installing Autoware, so make sure that Scenario Simulator installation has been completed first before proceeding.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/#running-steps","title":"Running steps","text":"
    1. Move to the workspace directory where Autoware and the Scenario Simulator have been built.

    2. Source the workspace setup script:

      source install/setup.bash\n
    3. Run the simulation:

      ros2 launch random_test_runner random_test.launch.py \\\narchitecture_type:=awf/universe \\\nsensor_model:=sample_sensor_kit \\\nvehicle_model:=sample_vehicle\n

    For more information about supported parameters, refer to the random_test_runner documentation.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/","title":"Scenario test simulation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/#scenario-test-simulation","title":"Scenario test simulation","text":"

    Note

    Running the Scenario Simulator requires some additional steps on top of building and installing Autoware, so make sure that Scenario Simulator installation has been completed first before proceeding.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/#running-steps","title":"Running steps","text":"
    1. Move to the workspace directory where Autoware and the Scenario Simulator have been built.

    2. Source the workspace setup script:

      source install/setup.bash\n
    3. Run the simulation:

      ros2 launch scenario_test_runner scenario_test_runner.launch.py \\\narchitecture_type:=awf/universe \\\nrecord:=false \\\nscenario:='$(find-pkg-share scenario_test_runner)/scenario/sample.yaml' \\\nsensor_model:=sample_sensor_kit \\\nvehicle_model:=sample_vehicle\n

    Reference video tutorials

    "},{"location":"tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/","title":"Driving Log Replayer","text":""},{"location":"tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/#driving-log-replayer","title":"Driving Log Replayer","text":"

    Driving Log Replayer is an evaluation tool for Autoware. To get started, follow the official instruction provided by TIER IV.

    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Introduction","text":""},{"location":"#autoware-documentation","title":"Autoware Documentation","text":""},{"location":"#about-autoware","title":"About Autoware","text":"

    Autoware is the world\u2019s leading open-source software project for autonomous driving. Autoware is built on Robot Operating System (ROS) and enables commercial deployment of autonomous driving in a broad range of vehicles and applications.

    Please see here for more details.

    "},{"location":"#related-documentations","title":"Related Documentations","text":"

    This Autoware Documentation is for Autoware's general information.

    For detailed documents of Autoware Universe components, see Autoware Universe Documentation.

    "},{"location":"#getting-started","title":"Getting started","text":""},{"location":"autoware-competitions/","title":"Autoware Competitions","text":""},{"location":"autoware-competitions/#autoware-competitions","title":"Autoware Competitions","text":"

    This page is a collection of the links to the competitions that are related to the Autoware Foundation.

    Title Status Description Ongoing Autoware / TIER IV Challenge 2023 Date: May 15, 2023 - Nov. 1st, 2023 As one of the main contributors of Autoware, TIER IV has been facing many difficult challenges through development, and TIER IV would like to sponsor a challenge to solve such engineering challenges. Any researchers, students, individuals or organizations are welcome to participate and submit their solution to any of the challenges we propose. Ongoing Japan Automotive AI Challenge 2023 Registration: June 5, 2023 - July 14, 2023 Qualifiers: July 3, 2023 - Aug. 31, 2023 Finals: Nov. 12, 2023 In this competition, we focus on challenging tasks posed by autonomous driving in factory environments and aim to develop Autoware-based AD software that can overcome them. The qualifiers use the digital twin autonomous driving simulator AWSIM to complete specific tasks within a virtual environment. Teams that make it to the finals have the opportunity to run their software on actual vehicles in a test course in Japan."},{"location":"autoware-competitions/#proposing-new-competition","title":"Proposing New Competition","text":"

    If you want to add a new competition to this page, please propose it in a TSC meeting and get confirmation from the AWF.

    "},{"location":"contributing/","title":"Contributing","text":""},{"location":"contributing/#contributing","title":"Contributing","text":"

    Thank you for your interest in contributing! Autoware is supported by people like you, and all types and sizes of contribution are welcome.

    As a contributor, here are the guidelines that we would like you to follow for Autoware and its associated repositories.

    Like Autoware itself, these guidelines are being actively developed and suggestions for improvement are always welcome! Guideline changes can be proposed by creating a discussion in the Ideas category.

    "},{"location":"contributing/#code-of-conduct","title":"Code of Conduct","text":"

    To ensure the Autoware community stays open and inclusive, please follow the Code of Conduct.

    If you believe that someone in the community has violated the Code of Conduct, please make a report by emailing conduct@autoware.org.

    "},{"location":"contributing/#what-should-i-know-before-i-get-started","title":"What should I know before I get started?","text":""},{"location":"contributing/#autoware-concepts","title":"Autoware concepts","text":"

    To gain a high-level understanding of Autoware's architecture and design, the following pages provide a brief overview:

    For experienced developers, the Autoware interfaces and individual component pages should also be reviewed to understand the inputs and outputs for each component or module at a more detailed level.

    "},{"location":"contributing/#contributing-to-open-source-projects","title":"Contributing to open source projects","text":"

    If you are new to open source projects, we recommend reading GitHub's How to Contribute to Open Source guide for an overview of why people contribute to open source projects, what it means to contribute and much more besides.

    "},{"location":"contributing/#how-can-i-get-help","title":"How can I get help?","text":"

    Do not open issues for general support questions as we want to keep GitHub issues for confirmed bug reports. Instead, open a discussion in the Q&A category. For more details on the support mechanisms for Autoware, refer to the Support guidelines.

    Note

    Issues created for questions or unconfirmed bugs will be moved to GitHub discussions by the maintainers.

    "},{"location":"contributing/#how-can-i-contribute","title":"How can I contribute?","text":""},{"location":"contributing/#discussions","title":"Discussions","text":"

    You can contribute to Autoware by facilitating and participating in discussions, such as:

    "},{"location":"contributing/#working-groups","title":"Working groups","text":"

    The various working groups within the Autoware Foundation are responsible for accomplishing goals set by the Technical Steering Committee. These working groups are open to everyone, and joining a particular working group will allow you to gain an understanding of current projects, see how those projects are managed within each group and to contribute to issues that will help progress a particular project.

    To see the schedule for upcoming working group meetings, refer to the Autoware Foundation events calendar.

    "},{"location":"contributing/#bug-reports","title":"Bug reports","text":"

    Before you report a bug, please search the issue tracker for the appropriate repository. It is possible that someone has already reported the same issue and that workarounds exist. If you can't determine the appropriate repository, ask the maintainers for help by creating a new discussion in the Q&A category.

    When reporting a bug, you should provide a minimal set of instructions to reproduce the issue. Doing so allows us to quickly confirm and focus on the right problem.

    If you want to fix the bug by yourself that will be appreciated, but you should discuss possible approaches with the maintainers in the issue before submitting a pull request.

    Creating an issue is straightforward, but if you happen to experience any problems then create a Q&A discussion to ask for help.

    "},{"location":"contributing/#pull-requests","title":"Pull requests","text":"

    You can submit pull requests for small changes such as:

    If your pull request is a large change, the following process should be followed:

    1. Create a GitHub Discussion to propose the change. Doing so allows you to get feedback from other members and the Autoware maintainers and to ensure that the proposed change is in line with Autoware's design philosophy and current development plans. If you're not sure where to have that conversation, then create a new Q&A discussion.

    2. Create an issue following consensus in the discussions

    3. Create a pull request to implement the changes that references the Issue created in step 2

    4. Create documentation for the new addition (if relevant)

    Examples of large changes include:

    For more information on how to submit a good pull request, have a read of the pull request guidelines and don't forget to review the required license notations!

    "},{"location":"contributing/license/","title":"License","text":""},{"location":"contributing/license/#license","title":"License","text":"

    Autoware is licensed under Apache License 2.0. Thus all contributions will be licensed as such as per clause 5 of the Apache License 2.0:

    5. Submission of Contributions. Unless You explicitly state otherwise,\n   any Contribution intentionally submitted for inclusion in the Work\n   by You to the Licensor shall be under the terms and conditions of\n   this License, without any additional terms or conditions.\n   Notwithstanding the above, nothing herein shall supersede or modify\n   the terms of any separate license agreement you may have executed\n   with Licensor regarding such Contributions.\n

    Here is an example copyright header to add to the top of a new file:

    Copyright [first year of contribution] The Autoware Contributors\nSPDX-License-Identifier: Apache-2.0\n

    We don't write copyright notations of each contributor here. Instead, we place them in the NOTICE file like the following.

    This product includes code developed by [company name].\nCopyright [first year of contribution] [company name]\n

    Let us know if your legal department has a special request for the copyright notation.

    Currently, the old formats explained here are also acceptable. Those old formats can be replaced by this new format if the original authors agree. Note that we won't write their copyrights to the NOTICE file unless they agree with the new format.

    References:

    "},{"location":"contributing/coding-guidelines/","title":"Coding guidelines","text":""},{"location":"contributing/coding-guidelines/#coding-guidelines","title":"Coding guidelines","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/#common-guidelines","title":"Common guidelines","text":"

    Refer to the following links for now:

    Also, keep in mind the following concepts.

    "},{"location":"contributing/coding-guidelines/languages/cmake/","title":"CMake","text":""},{"location":"contributing/coding-guidelines/languages/cmake/#cmake","title":"CMake","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/cmake/#use-the-autoware_package-macro","title":"Use the autoware_package macro","text":"

    To reduce duplications in CMakeLists.txt, there is the autoware_package() macro. See the README and use it in your package.

    "},{"location":"contributing/coding-guidelines/languages/cpp/","title":"C++","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#c","title":"C++","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/languages/cpp/#references","title":"References","text":"

    Follow the guidelines below if a rule is not defined on this page.

    1. https://docs.ros.org/en/humble/Contributing/Code-Style-Language-Versions.html
    2. https://www.autosar.org/fileadmin/standards/adaptive/22-11/AUTOSAR_RS_CPP14Guidelines.pdf
    3. https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines

    Also, it is encouraged to apply Clang-Tidy to each file. For the usage, see Applying Clang-Tidy to ROS packages.

    Note that not all rules are covered by Clang-Tidy.

    "},{"location":"contributing/coding-guidelines/languages/cpp/#style-rules","title":"Style rules","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#include-header-files-in-the-defined-order-required-partially-automated","title":"Include header files in the defined order (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example","title":"Example","text":"

    Include the headers in the following order:

    // Compliant\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    If you use \"\" and <> properly, ClangFormat in pre-commit sorts headers automatically.

    Do not define macros between #include lines because it prevents automatic sorting.

    // Non-compliant\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#define EIGEN_MPL2_ONLY\n#include \"my_header.hpp\"\n#include \"my_package/foo.hpp\"\n\n#include <Eigen/Core>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    Instead, define macros before #include lines.

    // Compliant\n#define EIGEN_MPL2_ONLY\n\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <Eigen/Core>\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n

    If there are any reasons for defining macros at a specific position, write a comment before the macro.

    // Compliant\n#include \"my_header.hpp\"\n\n#include \"my_package/foo.hpp\"\n\n#include <package1/foo.hpp>\n#include <package2/bar.hpp>\n\n#include <std_msgs/msg/header.hpp>\n\n#include <iostream>\n#include <vector>\n\n// For the foo bar reason, the FOO_MACRO must be defined here.\n#define FOO_MACRO\n#include <foo/bar.hpp>\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-lower-snake-case-for-function-names-required-partially-automated","title":"Use lower snake case for function names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_1","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_1","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_1","title":"Example","text":"
    void function_name()\n{\n}\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-upper-camel-case-for-enum-names-required-partially-automated","title":"Use upper camel case for enum names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_2","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception_1","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_2","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_2","title":"Example","text":"
    enum class Color\n{\nRed, Green, Blue\n}\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#use-lower-snake-case-for-constant-names-required-partially-automated","title":"Use lower snake case for constant names (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_3","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#exception_2","title":"Exception","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_3","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_3","title":"Example","text":"
    constexpr double gravity = 9.80665;\n
    "},{"location":"contributing/coding-guidelines/languages/cpp/#count-acronyms-and-contractions-of-compound-words-as-one-word-required-partially-automated","title":"Count acronyms and contractions of compound words as one word (required, partially automated)","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#rationale_4","title":"Rationale","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#reference_4","title":"Reference","text":""},{"location":"contributing/coding-guidelines/languages/cpp/#example_4","title":"Example","text":"
    class RosApi;\nRosApi ros_api;\n
    "},{"location":"contributing/coding-guidelines/languages/docker/","title":"Docker","text":""},{"location":"contributing/coding-guidelines/languages/docker/#docker","title":"Docker","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/github-actions/","title":"GitHub Actions","text":""},{"location":"contributing/coding-guidelines/languages/github-actions/#github-actions","title":"GitHub Actions","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/markdown/","title":"Markdown","text":""},{"location":"contributing/coding-guidelines/languages/markdown/#markdown","title":"Markdown","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/package-xml/","title":"package.xml","text":""},{"location":"contributing/coding-guidelines/languages/package-xml/#packagexml","title":"package.xml","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/python/","title":"Python","text":""},{"location":"contributing/coding-guidelines/languages/python/#python","title":"Python","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/languages/shell-scripts/","title":"Shell scripts","text":""},{"location":"contributing/coding-guidelines/languages/shell-scripts/#shell-scripts","title":"Shell scripts","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/coding-guidelines/ros-nodes/class-design/","title":"Class design","text":""},{"location":"contributing/coding-guidelines/ros-nodes/class-design/#class-design","title":"Class design","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/","title":"Console logging","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#console-logging","title":"Console logging","text":"

    ROS 2 logging is a powerful tool for understanding and debugging ROS nodes.

    This page focuses on how to design console logging in Autoware and shows several practical examples. To comprehensively understand how ROS 2 logging works, refer to the logging documentation.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#logging-use-cases-in-autoware","title":"Logging use cases in Autoware","text":"

    To efficiently support these use cases, clean and highly visible logs are required. For that, several rules are defined below.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rules","title":"Rules","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#choose-appropriate-severity-levels-required-non-automated","title":"Choose appropriate severity levels (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale","title":"Rationale","text":"

    It's confusing if severity levels are inappropriate as follows:

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example","title":"Example","text":"

    Use the following criteria as a reference:

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#filter-out-unnecessary-logs-by-setting-logging-options-required-non-automated","title":"Filter out unnecessary logs by setting logging options (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_1","title":"Rationale","text":"

    Some third-party nodes such as drivers may not follow Autoware's guidelines. If the logs are noisy, unnecessary logs should be filtered out.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_1","title":"Example","text":"

    Use the --log-level {level} option to change the minimum level of logs to be displayed:

    <launch>\n<!-- This outputs only FATAL level logs. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--log-level fatal\" />\n</launch>\n

    If you want to disable only specific output targets, use the --disable-stdout-logs, --disable-rosout-logs, and/or --disable-external-lib-logs options:

    <launch>\n<!-- This outputs to rosout and disk. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--disable-stdout-logs\" />\n</launch>\n
    <launch>\n<!-- This outputs to stdout. -->\n<node pkg=\"demo_nodes_cpp\" exec=\"talker\" ros_args=\"--disable-rosout-logs --disable-external-lib-logs\" />\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#use-throttled-logging-when-the-log-is-unnecessarily-shown-repeatedly-required-non-automated","title":"Use throttled logging when the log is unnecessarily shown repeatedly (required, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_2","title":"Rationale","text":"

    If tons of logs are shown on the console, people may miss important messages.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_2","title":"Example","text":"

    While waiting for some messages, throttled logs are usually enough. In such cases, wait about 5 seconds as a reference value.

    // Compliant\nvoid FooNode::on_timer() {\nif (!current_pose_) {\nRCLCPP_ERROR_THROTTLE(get_logger(), *get_clock(), 5000, \"Waiting for current_pose_.\");\nreturn;\n}\n}\n\n// Non-compliant\nvoid FooNode::on_timer() {\nif (!current_pose_) {\nRCLCPP_ERROR(get_logger(), \"Waiting for current_pose_.\");\nreturn;\n}\n}\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#exception","title":"Exception","text":"

    The following cases are acceptable even if it's not throttled.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#do-not-depend-on-rclcppnode-in-core-library-classes-but-depend-only-on-rclcpplogginghpp-advisory-non-automated","title":"Do not depend on rclcpp::Node in core library classes but depend only on rclcpp/logging.hpp (advisory, non-automated)","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#rationale_3","title":"Rationale","text":"

    Core library classes, which contain reusable algorithms, may also be used for non-ROS platforms. When porting libraries to other platforms, fewer dependencies are preferred.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#example_3","title":"Example","text":"
    // Compliant\n#include <rclcpp/logging.hpp>\n\nclass FooCore {\npublic:\nexplicit FooCore(const rclcpp::Logger & logger) : logger_(logger) {}\n\nvoid process() {\nRCLCPP_INFO(logger_, \"message\");\n}\n\nprivate:\nrclcpp::Logger logger_;\n};\n\n// Compliant\n// Note that logs aren't published to `/rosout` if the logger name is different from the node name.\n#include <rclcpp/logging.hpp>\n\nclass FooCore {\nvoid process() {\nRCLCPP_INFO(rclcpp::get_logger(\"foo_core_logger\"), \"message\");\n}\n};\n\n\n// Non-compliant\n#include <rclcpp/node.hpp>\n\nclass FooCore {\npublic:\nexplicit FooCore(const rclcpp::NodeOptions & node_options) : node_(\"foo_core_node\", node_options) {}\n\nvoid process() {\nRCLCPP_INFO(node_.get_logger(), \"message\");\n}\n\nprivate:\nrclcpp::Node node_;\n};\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#tips","title":"Tips","text":""},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#use-rqt_console-to-filter-logs","title":"Use rqt_console to filter logs","text":"

    To filter logs, using rqt_console is useful:

    ros2 run rqt_console rqt_console\n

    For more details, refer to ROS 2 Documentation.

    "},{"location":"contributing/coding-guidelines/ros-nodes/console-logging/#useful-marco-expressions","title":"Useful macro expressions","text":"

    To debug a program, sometimes you need to see which functions and lines of code are executed. In that case, you can use the __FILE__, __LINE__ and __FUNCTION__ macros:

    void FooNode::on_timer() {\nRCLCPP_DEBUG(get_logger(), \"file: %s, line: %d, function: %s\", __FILE__, __LINE__, __FUNCTION__);\n}\n

    The example output is as follows:

    [DEBUG] [1671720414.395456931] [foo]: file: /path/to/file.cpp, line: 100, function: on_timer

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/","title":"Coordinate system","text":""},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#coordinate-system","title":"Coordinate system","text":""},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#overview","title":"Overview","text":"

    The commonly used coordinate systems include the world coordinate system, the vehicle coordinate system, and the sensor coordinate system.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#how-coordinates-are-used-in-autoware","title":"How coordinates are used in Autoware","text":"

    In Autoware, coordinate systems are typically used to represent the position and movement of vehicles and obstacles in space. Coordinate systems are commonly used for path planning, perception and control, and can help the vehicle decide how to avoid obstacles and plan a safe and efficient path of travel.

    1. Transformation of sensor data

      In Autoware, each sensor has a unique coordinate system and their data is expressed in terms of the coordinates. In order to correlate the independent data between different sensors, we need to find the position relationship between each sensor and the vehicle body. Once the installation position of the sensor on the vehicle body is determined, it will remain fixed during running, so the offline calibration method can be used to determine the precise position of each sensor relative to the vehicle body.

    2. ROS TF2

      The TF2 system maintains a tree of coordinate transformations to represent the relationships between different coordinate systems. Each coordinate system is given a unique name and they are connected by coordinate transformations. For details on how to use TF2, refer to the TF2 tutorial.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#tf-tree","title":"TF tree","text":"

    In Autoware, a common coordinate system structure is shown below:

    graph TD\n    /earth --> /map\n    /map --> /base_link\n    /base_link --> /imu\n    /base_link --> /lidar\n    /base_link --> /gnss\n    /base_link --> /radar\n    /base_link --> /camera_link\n    /camera_link --> /camera_optical_link
    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#estimating-the-base_link-frame-by-using-the-other-sensors","title":"Estimating the base_link frame by using the other sensors","text":"

    Generally we don't have the localization sensors physically at the base_link frame. So various sensors localize with respect to their own frames, let's call it sensor frame.

    We introduce a new frame naming convention: x_by_y:

    x: estimated frame name\ny: localization method/source\n

    We cannot directly get the sensor frame. Because we would need the EKF module to estimate the base_link frame first.

    Without the EKF module the best we can do is to estimate Map[map] --> sensor_by_sensor --> base_link_by_sensor using this sensor.

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#example-by-the-gnssins-sensor","title":"Example by the GNSS/INS sensor","text":"

    For the integrated GNSS/INS we use the following frames:

    flowchart LR\n    earth --> Map[map] --> gnss_ins_by_gnss_ins --> base_link_by_gnss_ins

    The gnss_ins_by_gnss_ins frame is obtained by the coordinates from GNSS/INS sensor. The coordinates are converted to map frame using the gnss_poser node.

    Finally gnss_ins_by_gnss_ins frame represents the position of the gnss_ins estimated by the gnss_ins sensor in the map.

    Then by using the static transformation between gnss_ins and the base_link frame, we can obtain the base_link_by_gnss_ins frame. Which represents the base_link estimated by the gnss_ins sensor.

    References:

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#coordinate-axes-conventions","title":"Coordinate Axes Conventions","text":"

    We are using East, North, Up (ENU) coordinate axes convention by default throughout the stack.

    X+: East\nY+: North\nZ+: Up\n

    The position, orientation, velocity, acceleration are all defined in the same axis convention.

    Position by the GNSS/INS sensor is expected to be in earth frame.

    Orientation, velocity, acceleration by the GNSS/INS sensor are expected to be in the sensor frame. Axes parallel to the map frame.

    If roll, pitch, yaw is provided, they correspond to rotation around X, Y, Z axes respectively.

    Rotation around:\nX+: roll\nY+: pitch\nZ+: yaw\n

    References:

    "},{"location":"contributing/coding-guidelines/ros-nodes/coordinate-system/#how-they-can-be-created","title":"How they can be created","text":"
    1. Calibration of sensor

      The conversion relationship between every sensor coordinate system and base_link can be obtained through sensor calibration technology. For how to calibrate your sensors, refer to this link: calibrating your sensors.

    2. Localization

      The relationship between the base_link coordinate system and the map coordinate system is determined by the position and orientation of the vehicle, and can be obtained from the vehicle localization result.

    3. Geo-referencing of map data

      The geo-referencing information can get the transformation relationship of earth coordinate system to local map coordinate system.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/","title":"Directory structure","text":""},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#directory-structure","title":"Directory structure","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#c-package","title":"C++ package","text":"
    <package_name>\n\u251c\u2500 config\n\u2502   \u251c\u2500 foo_ros.param.yaml\n\u2502   \u2514\u2500 foo_non_ros.yaml\n\u251c\u2500 doc\n\u2502   \u251c\u2500 foo_document.md\n\u2502   \u2514\u2500 foo_diagram.svg\n\u251c\u2500 include\n\u2502   \u2514\u2500 <package_name>\n\u2502       \u2514\u2500 foo_public.hpp\n\u251c\u2500 launch\n\u2502   \u251c\u2500 foo.launch.xml\n\u2502   \u2514\u2500 foo.launch.py\n\u251c\u2500 schema\n\u2502   \u2514\u2500 foo_node.schema.json\n\u251c\u2500 src\n\u2502   \u251c\u2500 foo_node.cpp\n\u2502   \u251c\u2500 foo_node.hpp\n\u2502   \u2514\u2500 foo_private.hpp\n\u251c\u2500 test\n\u2502   \u2514\u2500 test_foo.cpp\n\u251c\u2500 package.xml\n\u251c\u2500 CMakeLists.txt\n\u2514\u2500 README.md\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#directory-descriptions","title":"Directory descriptions","text":""},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#config","title":"config","text":"

    Place configuration files such as node parameters. For ROS parameters, use the extension .param.yaml. For non-ROS parameters, use the extension .yaml.

    Rationale: Since ROS parameters files are type-sensitive, they should not be the target of some code formatters and linters. In order to distinguish the file type, we use different file extensions.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#doc","title":"doc","text":"

    Place document files and link from README.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#include","title":"include","text":"

    Place header files exposed to other packages. Do not place files directly under the include directory, but place files under the directory with the package name. This directory is used for mostly library headers. Note that many headers do not need to be placed here. It is enough to place the headers under the src directory.

    Reference: https://docs.ros.org/en/rolling/How-To-Guides/Ament-CMake-Documentation.html#adding-files-and-headers

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#launch","title":"launch","text":"

    Place launch files (.launch.xml and .launch.py).

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#schema","title":"schema","text":"

    Place parameter definition files. See parameters for details.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#src","title":"src","text":"

    Place source files and private header files.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#test","title":"test","text":"

    Place source files for testing. See unit testing for details.

    "},{"location":"contributing/coding-guidelines/ros-nodes/directory-structure/#python-package","title":"Python package","text":"

    T.B.D.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/","title":"Launch files","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#launch-files","title":"Launch files","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#overview","title":"Overview","text":"

    Autoware use ROS 2 launch system to startup the software. Please see the official documentation to get a basic understanding about ROS 2 Launch system if you are not familiar with it.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#guideline","title":"Guideline","text":""},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#the-organization-of-launch-files-in-autoware","title":"The organization of launch files in Autoware","text":"

    Autoware mainly has two repositories related to launch file organization: the autoware.universe and the autoware_launch.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#autowareuniverse","title":"autoware.universe","text":"

    the autoware.universe contains the code of the main Autoware modules, and its launch directory is responsible for launching the nodes of each module. Autoware software stack is organized based on the architecture, so you may find that we try to match the launch structure similar to the architecture (splitting of files, namespace). For example, the tier4_map_launch subdirectory corresponds to the map module, so do the other tier4_*_launch subdirectories.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#autoware_launch","title":"autoware_launch","text":"

    The autoware_launch is a repository referring to autoware.universe. The mainly purpose of introducing this repository is to provide the general entrance to start the Autoware software stacks, i.e, calling the launch file of each module.

    graph LR\nA11[logging_simulator.launch.xml]-.->A10[autoware.launch.xml]\nA12[planning_simulator.launch.xml]-.->A10[autoware.launch.xml]\nA13[e2e_simulator.launch.xml]-.->A10[autoware.launch.xml]\n\nA10-->A21[tier4_map_component.launch.xml]\nA10-->A22[xxx.launch.py]\nA10-->A23[tier4_localization_component.launch.xml]\nA10-->A24[xxx.launch.xml]\nA10-->A25[tier4_sensing_component.launch.xml]\n\nA23-->A30[localization.launch.xml]\nA30-->A31[pose_estimator.launch.xml]\nA30-->A32[util.launch.xml]\nA30-->A33[pose_twist_fusion_filter.launch.xml]\nA30-->A34[xxx.launch.xml]\nA30-->A35[twist_estimator.launch.xml]\n\nA33-->A41[stop_filter.launch.xml]\nA33-->A42[ekf_localizer.launch.xml]\nA33-->A43[twist2accel.launch.xml]
    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#add-a-new-package-in-autoware","title":"Add a new package in Autoware","text":"

    If a newly created package has executable node, we expect sample launch file and configuration within the package, just like the recommended structure shown in previous directory structure page.

    In order to automatically load the newly added package when starting Autoware, you need to make some necessary changes to the corresponding launch file. For example, if using ICP instead of NDT as the pointcloud registration algorithm, you can modify the autoware.universe/launch/tier4_localization_launch/launch/pose_estimator/pose_estimator.launch.xml file to load the newly added ICP package.

    "},{"location":"contributing/coding-guidelines/ros-nodes/launch-files/#parameter-management","title":"Parameter management","text":"

    Another purpose of introducing the autoware_launch repository is to facilitate the parameter management of Autoware. Thinking about this situation: if we want to integrate Autoware to a specific vehicle and modify parameters, we have to fork autoware.universe which also has a lot of code other than parameters and is frequently updated by developers. By integrating these parameters in autoware_launch, we can customize the Autoware parameters just by forking autoware_launch repository. Taking the localization module as an examples:

    1. all the \u201claunch parameters\u201d for localization component is listed in the files under autoware_launch/autoware_launch/config/localization.
    2. the \"launch parameters\" file paths are set in the autoware_launch/autoware_launch/launch/components/tier4_localization_component.launch.xml file.
    3. in autoware.universe/launch/tier4_localization_launch/launch, the launch files loads the \u201claunch parameters\u201d if the argument is given in the parameter configuration file. You can still use the default parameters in each packages to launch tier4_localization_launch within autoware.universe.
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/","title":"Message guidelines","text":""},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#message-guidelines","title":"Message guidelines","text":""},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#format","title":"Format","text":"

    All messages should follow ROS message description specification.

    The accepted formats are:

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#naming","title":"Naming","text":"

    Under Construction

    Use Array as a suffix when creating a plural type of a message. This suffix is commonly used in common_interfaces.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#default-units","title":"Default units","text":"

    All the fields by default have the following units depending on their types:

    type default unit distance meter (m) angle radians (rad) time second (s) speed m/s velocity m/s acceleration m/s\u00b2 angular vel. rad/s angular accel. rad/s\u00b2

    If a field in a message has any of these default units, don't add any suffix or prefix denoting the type.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#non-default-units","title":"Non-default units","text":"

    For non-default units, use following suffixes:

    type non-default unit suffix distance nanometer _nm distance micrometer _um distance millimeter _mm distance kilometer _km angle degree (deg) _deg time nanosecond _ns time microsecond _us time millisecond _ms time minute _min time hour (h) _hour velocity km/h _kmph

    If a unit that you'd like to use doesn't exist here, create an issue/PR to add it to this list.

    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#message-field-types","title":"Message field types","text":"

    For list of types supported by the ROS interfaces see here.

    Also copied here for convenience:

    Message Field Type C++ equivalent bool bool byte uint8_t char char float32 float float64 double int8 int8_t uint8 uint8_t int16 int16_t uint16 uint16_t int32 int32_t uint32 uint32_t int64 int64_t uint64 uint64_t string std::string wstring std::u16string"},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#arrays","title":"Arrays","text":"

    For arrays, use unbounded dynamic array type.

    Example:

    int32[] unbounded_integer_array\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#enumerations","title":"Enumerations","text":"

    ROS 2 interfaces don't support enumerations directly.

    It is possible to define integers constants and assign them to a non-constant integer parameter.

    Constants are written in CONSTANT_CASE.

    Assign a different value to each element of a constant.

    Example from shape_msgs/msg/SolidPrimitive.msg

    uint8 BOX=1\nuint8 SPHERE=2\nuint8 CYLINDER=3\nuint8 CONE=4\nuint8 PRISM=5\n\n# The type of the shape\nuint8 type\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#comments","title":"Comments","text":"

    On top of the message, briefly explain what the message contains and/or what it is used for. For an example, see sensor_msgs/msg/Imu.msg.

    If necessary, add line comments before the fields that explain the context and/or meaning.

    For simple fields like x, y, z, w you might not need to add comments.

    Even though it is not strictly checked, try not to pass 100 characters in a line.

    Example:

    # Number of times the vehicle performed an emergency brake\nuint32 count_emergency_brake\n\n# Seconds passed since the last emergency brake\nuint64 duration\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/message-guidelines/#example-usages","title":"Example usages","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/","title":"Parameters","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#parameters","title":"Parameters","text":"

    Autoware ROS nodes have declared parameters which values are provided during the node start up in the form of a parameter file. All the expected parameters with corresponding values should exist in the parameter file. Depending on the application, the parameter values might need to be modified.

    Find more information on parameters from the official ROS documentation:

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#workflow","title":"Workflow","text":"

    A ROS package which uses the declare_parameter(...) function should:

    The rationale behind this workflow is to have a verified single source of truth to pass to the ROS node and to be used in the web documentation. The approach reduces the risk of using invalid parameter values and makes maintenance of documentation easier. This is achieved by:

    Note: a parameter value can still be modified and bypass the validation, as there is no validation during runtime.

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#declare-parameter-function","title":"Declare Parameter Function","text":"

    It is the declare_parameter(...) function which sets the parameter values during a node startup.

    declare_parameter<INSERT_TYPE>(\"INSERT_PARAMETER_1_NAME\"),\ndeclare_parameter<INSERT_TYPE>(\"INSERT_PARAMETER_N_NAME\")\n

    As there is no default_value provided, the function throws an exception if a parameter were to be missing in the provided *.param.yaml file. Use a type from the C++ Type column in the table below for the declare_parameter(...) function, replacing INSERT_TYPE.

    ParameterType Enum C++ Type PARAMETER_BOOL bool PARAMETER_INTEGER int64_t PARAMETER_DOUBLE double PARAMETER_STRING std::string PARAMETER_BYTE_ARRAY std::vector<uint8_t> PARAMETER_BOOL_ARRAY std::vector<bool> PARAMETER_INTEGER_ARRAY std::vector<int64_t> PARAMETER_DOUBLE_ARRAY std::vector<double> PARAMETER_STRING_ARRAY std::vector<std::string>

    The table has been derived from Parameter Type and Parameter Value.

    See example: Lidar Apollo Segmentation TVM Nodes declare function

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#parameter-file","title":"Parameter File","text":"

    The parameter file is minimal as there is no need to provide the user with additional information, e.g., description or type. This is because the associated schema file provides the additional information. Use the template below as a starting point for a ROS node.

    /**:\nros__parameters:\nINSERT_PARAMETER_1_NAME: INSERT_PARAMETER_1_VALUE\nINSERT_PARAMETER_N_NAME: INSERT_PARAMETER_N_VALUE\n

    Note: /** is used instead of the explicit node namespace, this allows the parameter file to be passed to a ROS node which has been remapped.

    To adapt the template to the ROS node, replace each INSERT_PARAMETER_..._NAME and INSERT_PARAMETER_..._VALUE for all parameters. Each declare_parameter(...) takes one parameter as input. All the parameter files should have the .param.yaml suffix so that the auto-format can be applied properly.

    Autoware has the following two types of parameter files for ROS packages:

    <launch>\n<arg name=\"foo_node_param_path\" default=\"$(find-pkg-share FOO_package)/config/foo_node.param.yaml\" />\n\n<node pkg=\"FOO_package\" exec=\"foo_node\">\n...\n    <param from=\"$(var foo_node_param_path)\" />\n</node>\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#json-schema","title":"JSON Schema","text":"

    JSON Schema is used the validate the parameter file(s) ensuring that it has the correct structure and content. Using JSON Schema for this purpose is considered best practice for cloud-native development. The schema template below shall be used as a starting point when defining the schema for a ROS node.

    {\n\"$schema\": \"http://json-schema.org/draft-07/schema#\",\n\"title\": \"INSERT_TITLE\",\n\"type\": \"object\",\n\"definitions\": {\n\"INSERT_ROS_NODE_NAME\": {\n\"type\": \"object\",\n\"properties\": {\n\"INSERT_PARAMETER_1_NAME\": {\n\"type\": \"INSERT_TYPE\",\n\"description\": \"INSERT_DESCRIPTION\",\n\"default\": \"INSERT_DEFAULT\",\n\"INSERT_BOUND_CONDITION(S)\": INSERT_BOUND_VALUE(S)\n},\n\"INSERT_PARAMETER_N_NAME\": {\n\"type\": \"INSERT_TYPE\",\n\"description\": \"INSERT_DESCRIPTION\",\n\"default\": \"INSERT_DEFAULT\",\n\"INSERT_BOUND_CONDITION(S)\": INSERT_BOUND_VALUE(S)\n}\n},\n\"required\": [\"INSERT_PARAMETER_1_NAME\", \"INSERT_PARAMETER_N_NAME\"],\n\"additionalProperties\": false\n}\n},\n\"properties\": {\n\"/**\": {\n\"type\": \"object\",\n\"properties\": {\n\"ros__parameters\": {\n\"$ref\": \"#/definitions/INSERT_ROS_NODE_NAME\"\n}\n},\n\"required\": [\"ros__parameters\"],\n\"additionalProperties\": false\n}\n},\n\"required\": [\"/**\"],\n\"additionalProperties\": false\n}\n

    The schema file path is INSERT_PATH_TO_PACKAGE/schema/ and the schema file name is INSERT_NODE_NAME.schema.json. To adapt the template to the ROS node, replace each INSERT_... and add all parameters 1..N.

    See example: Lidar Apollo Segmentation TVM Nodes schema

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#attributes","title":"Attributes","text":"

    Parameters have several attributes, some are required and some optional. The optional attributes are highly encouraged when applicable, as they provide useful information about a parameter and can ensure the value of the parameter is within its bounds.

    "},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#required","title":"Required","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#optional","title":"Optional","text":""},{"location":"contributing/coding-guidelines/ros-nodes/parameters/#tips-and-tricks","title":"Tips and Tricks","text":"

    Using well established standards enables the use of conventional tooling. Below is an example of how to link a schema to the parameter file(s) using VS Code. This enables a developer with convenient features such as auto-complete and parameter bound validation.

    In the root directory of where the project is hosted, create a .vscode folder with two files; extensions.json containing

    {\n\"recommendations\": [\"redhat.vscode-yaml\"]\n}\n

    and settings.json containing

    {\n\"yaml.schemas\": {\n\"./INSERT_PATH_TO_PACKAGE/schema/INSERT_NODE_NAME.schema.json\": \"**/INSERT_NODE_NAME/config/*.param.yaml\"\n}\n}\n

    The RedHat YAML extension enables validation of YAML files using JSON Schema and the \"yaml.schemas\" setting associates the *.schema.json file with all *.param.yaml files in the config/ folder.

    "},{"location":"contributing/coding-guidelines/ros-nodes/task-scheduling/","title":"Task scheduling","text":""},{"location":"contributing/coding-guidelines/ros-nodes/task-scheduling/#task-scheduling","title":"Task scheduling","text":"

    Warning

    Under Construction

    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/","title":"Topic namespaces","text":""},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#topic-namespaces","title":"Topic namespaces","text":""},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#overview","title":"Overview","text":"

    ROS allows topics, parameters and nodes to be namespaced which provides the following benefits:

    This page focuses on how to use namespaces in Autoware and shows some useful examples. For basic information on topic namespaces, refer to this tutorial.

    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#how-topics-should-be-named-in-node","title":"How topics should be named in node","text":"

    Autoware divides the node into the following functional categories, and adds the start namespace for the nodes according to the categories.

    When a node is run in a namespace, all topics which that node publishes are given that same namespace. All nodes in the Autoware stack must support namespaces by avoiding practices such as publishing topics in the global namespace.

    In general, topics should be namespaced based on the function of the node which produces them and not the node (or nodes) which consume them.

    Classify topics as input or output topics based on they are subscribed or published by the node. In the node, input topic is named input/topic_name and output topic is named output/topic_name.

    Configure the topic in the node's launch file. Take the joy_controller node as an example, in the following example, set the input and output topics and remap topics in the joy_controller.launch.xml file.

    <launch>\n<arg name=\"input_joy\" default=\"/joy\"/>\n<arg name=\"input_odometry\" default=\"/localization/kinematic_state\"/>\n\n<arg name=\"output_control_command\" default=\"/external/$(var external_cmd_source)/joy/control_cmd\"/>\n<arg name=\"output_external_control_command\" default=\"/api/external/set/command/$(var external_cmd_source)/control\"/>\n<arg name=\"output_shift\" default=\"/api/external/set/command/$(var external_cmd_source)/shift\"/>\n<arg name=\"output_turn_signal\" default=\"/api/external/set/command/$(var external_cmd_source)/turn_signal\"/>\n<arg name=\"output_heartbeat\" default=\"/api/external/set/command/$(var external_cmd_source)/heartbeat\"/>\n<arg name=\"output_gate_mode\" default=\"/control/gate_mode_cmd\"/>\n<arg name=\"output_vehicle_engage\" default=\"/vehicle/engage\"/>\n\n<node pkg=\"joy_controller\" exec=\"joy_controller\" name=\"joy_controller\" output=\"screen\">\n<remap from=\"input/joy\" to=\"$(var input_joy)\"/>\n<remap from=\"input/odometry\" to=\"$(var input_odometry)\"/>\n\n<remap from=\"output/control_command\" to=\"$(var output_control_command)\"/>\n<remap from=\"output/external_control_command\" to=\"$(var output_external_control_command)\"/>\n<remap from=\"output/shift\" to=\"$(var output_shift)\"/>\n<remap from=\"output/turn_signal\" to=\"$(var output_turn_signal)\"/>\n<remap from=\"output/gate_mode\" to=\"$(var output_gate_mode)\"/>\n<remap from=\"output/heartbeat\" to=\"$(var output_heartbeat)\"/>\n<remap from=\"output/vehicle_engage\" to=\"$(var output_vehicle_engage)\"/>\n</node>\n</launch>\n
    "},{"location":"contributing/coding-guidelines/ros-nodes/topic-namespaces/#topic-names-in-the-code","title":"Topic names in the code","text":"
    1. Have ~ so that namespace in launch configuration is applied(should not start from root /).

    2. Have ~/input ~/output namespace before topic name used to communicate with other nodes.

      e.g., In node obstacle_avoidance_planner, using topic names of type ~/input/topic_name to subscribe to topics.

      objects_sub_ = create_subscription<PredictedObjects>(\n\"~/input/objects\", rclcpp::QoS{10},\nstd::bind(&ObstacleAvoidancePlanner::onObjects, this, std::placeholders::_1));\n

      e.g., In node obstacle_avoidance_planner, using topic names of type ~/output/topic_name to publish topic.

      traj_pub_ = create_publisher<Trajectory>(\"~/output/path\", 1);\n
    3. Visualization or debug purpose topics should have ~/debug/ namespace.

      e.g., In node obstacle_avoidance_planner, in order to debug or visualizing topics, using topic names of type ~/debug/topic_name to publish information.

      debug_markers_pub_ =\ncreate_publisher<visualization_msgs::msg::MarkerArray>(\"~/debug/marker\", durable_qos);\n\ndebug_msg_pub_ =\ncreate_publisher<tier4_debug_msgs::msg::StringStamped>(\"~/debug/calculation_time\", 1);\n

      The launch configured namespace will be add the topics before, so the topic names will be as following:

      /planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/debug/marker /planning/scenario_planning/lane_driving/motion_planning/obstacle_avoidance_planner/debug/calculation_time

    4. Rationale: we want to make topic names remapped and configurable from launch files.

    "},{"location":"contributing/discussion-guidelines/","title":"Discussion guidelines","text":""},{"location":"contributing/discussion-guidelines/#discussion-guidelines","title":"Discussion guidelines","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/documentation-guidelines/","title":"Documentation guidelines","text":""},{"location":"contributing/documentation-guidelines/#documentation-guidelines","title":"Documentation guidelines","text":""},{"location":"contributing/documentation-guidelines/#workflow","title":"Workflow","text":"

    Contributions to Autoware's documentation are welcome, and the same principles described in the contribution guidelines should be followed. Small, limited changes can be made by forking this repository and submitting a pull request, but larger changes should be discussed with the community and Autoware maintainers via GitHub Discussion first.

    Examples of small changes include:

    Examples of larger changes include:

    "},{"location":"contributing/documentation-guidelines/#style-guide","title":"Style guide","text":"

    You should refer to the Google developer documentation style guide as much as possible. Reading the Highlights page of that guide is recommended, but if not then the key points below should be noted.

    "},{"location":"contributing/documentation-guidelines/#tips","title":"Tips","text":""},{"location":"contributing/documentation-guidelines/#how-to-preview-your-modification","title":"How to preview your modification","text":"

    There are two ways to preview your modification on a documentation website.

    "},{"location":"contributing/documentation-guidelines/#1-using-github-actions-workflow","title":"1. Using GitHub Actions workflow","text":"

    Follow the steps below.

    1. Create a pull request to the repository.
    2. Add the deploy-docs label from the sidebar (See below figure).
    3. Wait for a couple of minutes, and the github-actions bot will notify the URL for the pull request's preview.

    "},{"location":"contributing/documentation-guidelines/#2-running-an-mkdocs-server-in-your-local-environment","title":"2. Running an MkDocs server in your local environment","text":"

    Instead of creating a PR, you can use the mkdocs command to build Autoware's documentation websites on your local computer. Assuming that you are using Ubuntu OS, run the following to install the required libraries.

    python3 -m pip install -U $(curl -fsSL https://raw.githubusercontent.com/autowarefoundation/autoware-github-actions/main/deploy-docs/mkdocs-requirements.txt)\n

    Then, run mkdocs serve on your documentation directory.

    cd /PATH/TO/YOUR-autoware-documentation\nmkdocs serve\n

    It will launch the MkDocs server. Access http://127.0.0.1:8000/ to see the preview of the website.

    "},{"location":"contributing/pull-request-guidelines/","title":"Pull request guidelines","text":""},{"location":"contributing/pull-request-guidelines/#pull-request-guidelines","title":"Pull request guidelines","text":""},{"location":"contributing/pull-request-guidelines/#general-pull-request-workflow","title":"General pull request workflow","text":"

    Autoware uses the fork-and-pull model. For more details about the model, refer to GitHub Docs.

    The following is a general example of the pull request workflow based on the fork-and-pull model. Use this workflow as a reference when you contribute to Autoware.

    1. Create an issue.
      • Discuss the approaches to the issue with maintainers.
      • Confirm the support guidelines before creating an issue.
      • Follow the discussion guidelines when you discuss with other contributors.
    2. Create a fork repository. (for the first time only)
    3. Write code in your fork repository according to the approach agreed upon in the issue.
      • Write the tests and documentation as appropriate.
      • Follow the coding guidelines guidelines when you write code.
      • Follow the Testing guidelines guidelines when you write tests.
      • Follow the Documentation guidelines guidelines when you write documentation.
      • Follow the commit guidelines when you commit your changes.
    4. Test the code.
      • It is recommended that you summarize the test results, because you will need to explain the test results in the later review process.
      • If you are not sure what tests should be done, discuss them with maintainers.
    5. Create a pull request.
      • Follow the pull request rules when you create a pull request.
    6. Wait for the pull request to be reviewed.
      • The reviewers will review your code following the review guidelines.
        • Not only the reviewers, but also the author is encouraged to understand the review guidelines.
      • If CI checks have failed, fix the errors.
    7. Address the review comments pointed out by the reviewers.
      • If you don't understand the meaning of a review comment, ask the reviewers until you understand it.
        • Fixing without understanding the reason is not recommended because the author should be responsible for the final content of their own pull request.
      • If you don't agree with a review comment, ask the reviewers for a rational reason.
        • The reviewers are obligated to make the author understand the meanings of each comment.
      • After you have done with the review comments, re-request a review to the reviewers and back to 6.
      • If there are no more new review comments, the reviewers will approve the pull request and proceed to 8.
    8. Merge the pull request.
      • Anyone with write access can merge the pull request if there is no special request from maintainers.
        • The author is encouraged to merge the pull request to feel responsible for their own pull request.
        • If the author does not have write access, ask the reviewers or maintainers.
    "},{"location":"contributing/pull-request-guidelines/#pull-request-rules","title":"Pull request rules","text":""},{"location":"contributing/pull-request-guidelines/#use-an-appropriate-pull-request-template-required-non-automated","title":"Use an appropriate pull request template (required, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example","title":"Example","text":"

    There are two types of templates. Select one based on the following condition.

    1. Standard change:
      • Complexity:
        • New features or significant updates.
        • Requires deeper understanding of the codebase.
      • Impact:
        • Affects multiple parts of the system.
        • Basically includes minor features, bug fixes and performance improvement.
        • Needs testing before merging.
    2. Small change:
      • Complexity:
        • Documentation, simple refactoring, or style adjustments.
        • Easy to understand and review.
      • Impact:
        • Minimal effect on the system.
        • Quicker merge with less testing needed.
    "},{"location":"contributing/pull-request-guidelines/#steps-to-use-an-appropriate-pull-request-template","title":"Steps to use an appropriate pull request template","text":"
    1. Select the appropriate template, as shown in this video.
    2. Read the selected template carefully and fill the required content.
    3. Check the checkboxes during a review.
      • There are pre-review checklist and post-review checklist for the author.
    "},{"location":"contributing/pull-request-guidelines/#set-appropriate-reviewers-after-creating-a-pull-request-required-partially-automated","title":"Set appropriate reviewers after creating a pull request (required, partially automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_1","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_1","title":"Example","text":""},{"location":"contributing/pull-request-guidelines/#apply-conventional-commits-to-the-pull-request-title-required-automated","title":"Apply Conventional Commits to the pull request title (required, automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_2","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_2","title":"Example","text":"
    feat(trajectory_follower): add an awesome feature\n

    Note

    You have to start the description part (here add an awesome feature) with a lowercase.

    If your change breaks some interfaces, use the ! (breaking changes) mark as follows:

    feat(trajectory_follower)!: remove package\nfeat(trajectory_follower)!: change parameter names\nfeat(planning)!: change topic names\nfeat(autoware_utils)!: change function names\n

    For the repositories that contain code (most repositories), use the definition of conventional-commit-types for the type.

    For documentation repositories such as autoware-documentation, use the following definition:

    perf and test are generally unused. Other types have the same meaning as the code repositories.

    "},{"location":"contributing/pull-request-guidelines/#add-the-related-component-names-to-the-scope-of-conventional-commits-advisory-non-automated","title":"Add the related component names to the scope of Conventional Commits (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_3","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_3","title":"Example","text":"

    For ROS packages, adding the package name or component name is good.

    feat(trajectory_follower): add an awesome feature\nrefactor(planning, control): use common utils\n
    "},{"location":"contributing/pull-request-guidelines/#keep-a-pull-request-small-advisory-non-automated","title":"Keep a pull request small (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_4","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#exception","title":"Exception","text":"

    It is acceptable if it is agreed with maintainers that there is no other way but to submit a big pull request.

    "},{"location":"contributing/pull-request-guidelines/#example_4","title":"Example","text":""},{"location":"contributing/pull-request-guidelines/#remind-reviewers-if-there-is-no-response-for-more-than-a-week-advisory-non-automated","title":"Remind reviewers if there is no response for more than a week (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/#rationale_5","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/#example_5","title":"Example","text":"
    @{some-of-developers} Would it be possible for you to review this PR?\n@autoware-maintainers friendly ping.\n
    "},{"location":"contributing/pull-request-guidelines/ci-checks/","title":"CI checks","text":""},{"location":"contributing/pull-request-guidelines/ci-checks/#ci-checks","title":"CI checks","text":"

    Autoware has several checks for a pull request. The results are shown at the bottom of the pull request page as below.

    If the \u274c mark is shown, click the Details button and investigate the failure reason.

    If the Required mark is shown, you cannot merge the pull request unless you resolve the error. If not, it is optional, but preferably it should be fixed.

    The following sections explain about common CI checks in Autoware. Note that some repositories may have different settings.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#dco","title":"DCO","text":"

    The Developer Certificate of Origin (DCO) is a lightweight way for contributors to certify that they wrote or otherwise have the right to submit the code they are contributing to the project.

    This workflow checks whether the pull request fulfills DCO. You need to confirm the required items and commit with git commit -s.

    For more information, refer to the GitHub App page.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#semantic-pull-request","title":"semantic-pull-request","text":"

    This workflow checks whether the pull request follows Conventional Commits.

    For the detailed rules, see the pull request rules.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#pre-commit","title":"pre-commit","text":"

    pre-commit is a tool to run formatters or linters when you commit.

    This workflow checks whether the pull request has no error with pre-commit.

    If the workflow pre-commit.ci - pr is enabled in the repository, it will automatically fix errors by pre-commit.ci as much as possible. If some errors remain, fix them manually.

    You can run pre-commit in your local environment by the following command:

    pre-commit run -a\n

    Or you can install pre-commit to the repository and automatically run it before committing:

    pre-commit install\n

    Since it is difficult to detect errors with no false positives, some jobs are split into another config file and marked as optional. To check them, use the --config option:

    pre-commit run -a --config .pre-commit-config-optional.yaml\n
    "},{"location":"contributing/pull-request-guidelines/ci-checks/#spell-check-differential","title":"spell-check-differential","text":"

    This workflow detects spelling mistakes using CSpell with our dictionary file. You can submit pull requests to tier4/autoware-spell-check-dict to update the dictionary.

    Since it is difficult to detect errors with no false positives, it is an optional workflow, but it is preferable to remove as many spelling mistakes as possible.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#build-and-test-differential","title":"build-and-test-differential","text":"

    This workflow checks colcon build and colcon test for the pull request. To make the CI faster, it doesn't check all packages but only the modified packages and their dependencies.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#build-and-test-differential-self-hosted","title":"build-and-test-differential-self-hosted","text":"

    This workflow is the ARM64 version of build-and-test-differential. You need to add the ARM64 label to run this workflow.

    For reference information, since ARM machines are not supported by GitHub-hosted runners, we use self-hosted runners prepared by the AWF. For the details about self-hosted runners, refer to GitHub Docs.

    "},{"location":"contributing/pull-request-guidelines/ci-checks/#deploy-docs","title":"deploy-docs","text":"

    This workflow deploys the preview documentation site for the pull request. You need to add the deploy-docs label to run this workflow.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/","title":"Commit guidelines","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#commit-guidelines","title":"Commit guidelines","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#branch-rules","title":"Branch rules","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#start-branch-names-with-the-corresponding-issue-numbers-advisory-non-automated","title":"Start branch names with the corresponding issue numbers (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#exception","title":"Exception","text":"

    If there are no corresponding issues, you can ignore this rule.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example","title":"Example","text":"
    123-add-feature\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#use-dash-case-for-the-separator-of-branch-names-advisory-non-automated","title":"Use dash-case for the separator of branch names (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_1","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_1","title":"Example","text":"
    123-add-feature\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference_1","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#make-branch-names-descriptive-advisory-non-automated","title":"Make branch names descriptive (advisory, non-automated)","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_2","title":"Rationale","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#exception_1","title":"Exception","text":"

    If you have already submitted a pull request, you do not have to change the branch name, because doing so would require re-creating the pull request, which is noisy and a waste of time. Be careful next time.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_2","title":"Example","text":"

    Usually it is good to start with a verb.

    123-fix-memory-leak-of-trajectory-follower\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#commit-rules","title":"Commit rules","text":""},{"location":"contributing/pull-request-guidelines/commit-guidelines/#sign-off-your-commits-required-automated","title":"Sign-off your commits (required, automated)","text":"

    Developers must certify that they wrote or otherwise have the right to submit the code they are contributing to the project.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#rationale_3","title":"Rationale","text":"

    If not, it will lead to complex license problems.

    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#example_3","title":"Example","text":"
    git commit -s\n
    feat: add a feature\n\nSigned-off-by: Autoware <autoware@example.com>\n
    "},{"location":"contributing/pull-request-guidelines/commit-guidelines/#reference_2","title":"Reference","text":""},{"location":"contributing/pull-request-guidelines/review-guidelines/","title":"Review guidelines","text":""},{"location":"contributing/pull-request-guidelines/review-guidelines/#review-guidelines","title":"Review guidelines","text":"

    Warning

    Under Construction

    Refer to the following links for now:

    "},{"location":"contributing/pull-request-guidelines/review-tips/","title":"Review tips","text":""},{"location":"contributing/pull-request-guidelines/review-tips/#review-tips","title":"Review tips","text":""},{"location":"contributing/pull-request-guidelines/review-tips/#toggle-annotations-or-review-comments-in-the-diff-view","title":"Toggle annotations or review comments in the diff view","text":"

    There might be some annotations or review comments in the diff view during your review.

    To toggle annotations, press the A key.

    Before:

    After:

    To toggle review comments, press the I key.

    For other keyboard shortcuts, refer to GitHub Docs.

    "},{"location":"contributing/pull-request-guidelines/review-tips/#view-code-in-the-web-based-visual-studio-code","title":"View code in the web-based Visual Studio Code","text":"

    You can open Visual Studio Code from your browser to view code in a rich UI. To use it, press the . key on any repository or pull request.

    For more detailed usage, refer to github/dev.

    "},{"location":"contributing/pull-request-guidelines/review-tips/#check-out-the-branch-of-a-pull-request-quickly","title":"Check out the branch of a pull request quickly","text":"

    If you want to check out the branch of a pull request, it's generally troublesome with the fork-and-pull model.

    # Copy the user name and the fork URL.\ngit remote add {user-name} {fork-url}\ngit checkout {user-name}/{branch-name}\ngit remote rm {user-name} # To clean up\n

    Instead, you can use GitHub CLI to simplify the steps, just run gh pr checkout {pr-number}.

    You can copy the command from the top right of the pull request page.

    "},{"location":"contributing/testing-guidelines/","title":"Testing guidelines","text":""},{"location":"contributing/testing-guidelines/#testing-guidelines","title":"Testing guidelines","text":""},{"location":"contributing/testing-guidelines/#unit-testing","title":"Unit testing","text":"

    Unit testing is a software testing method that tests individual units of source code to determine whether they satisfy the specification.

    For details, see the Unit testing guidelines.

    "},{"location":"contributing/testing-guidelines/#integration-testing","title":"Integration testing","text":"

    Integration testing combines and tests the individual software modules as a group, and is done after unit testing.

    While performing integration testing, the following subtypes of tests are written:

    1. Fault injection testing
    2. Back-to-back comparison between a model and code
    3. Requirements-based testing
    4. Anomaly detection during integration testing
    5. Random input testing

    For details, see the Integration testing guidelines.

    "},{"location":"contributing/testing-guidelines/integration-testing/","title":"Integration testing","text":""},{"location":"contributing/testing-guidelines/integration-testing/#integration-testing","title":"Integration testing","text":"

    An integration test is defined as the phase in software testing where individual software modules are combined and tested as a group. Integration tests occur after unit tests, and before validation tests.

    The input to an integration test is a set of independent modules that have been unit tested. The set of modules is tested against the defined integration test plan, and the output is a set of properly integrated software modules that is ready for system testing.

    "},{"location":"contributing/testing-guidelines/integration-testing/#value-of-integration-testing","title":"Value of integration testing","text":"

    Integration tests determine if independently developed software modules work correctly when the modules are connected to each other. In ROS 2, the software modules are called nodes. Testing a single node is a special type of integration test that is commonly referred to as component testing.

    Integration tests help to find the following types of errors:

    With ROS 2, it is possible to program complex autonomous-driving applications with a large number of nodes. Therefore, a lot of effort has been made to provide an integration-test framework that helps developers test the interaction of ROS 2 nodes.

    "},{"location":"contributing/testing-guidelines/integration-testing/#integration-test-framework","title":"Integration-test framework","text":"

    A typical integration-test framework has three parts:

    1. A series of executables with arguments that work together and generate outputs.
    2. A series of expected outputs that should match the output of the executables.
    3. A launcher that starts the tests, compares the outputs to the expected outputs, and determines if the test passes.

    In Autoware, we use the launch_testing framework.

    "},{"location":"contributing/testing-guidelines/integration-testing/#smoke-tests","title":"Smoke tests","text":"

    Autoware has a dedicated API for smoke testing. To use this framework, in package.xml add:

    <test_depend>autoware_testing</test_depend>\n

    And in CMakeLists.txt add:

    if(BUILD_TESTING)\nfind_package(autoware_testing REQUIRED)\nadd_smoke_test(${PROJECT_NAME} ${NODE_NAME})\nendif()\n

    Doing so adds smoke tests that ensure that a node can be:

    1. Launched with a default parameter file.
    2. Terminated with a standard SIGTERM signal.

    For the full API documentation, refer to the package design page.

    Note

    This API is not suitable for all smoke test cases. It cannot be used when a specific file location (eg: for a map) is required to be passed to the node, or if some preparation needs to be conducted before node launch. In such cases use the manual solution from the component test section below.

    "},{"location":"contributing/testing-guidelines/integration-testing/#integration-test-with-a-single-node-component-test","title":"Integration test with a single node: component test","text":"

    The simplest scenario is a single node. In this case, the integration test is commonly referred to as a component test.

    To add a component test to an existing node, you can follow the example of the lanelet2_map_loader in the map_loader package (added in this PR).

    In package.xml, add:

    <test_depend>ros_testing</test_depend>\n

    In CMakeLists.txt, add or modify the BUILD_TESTING section:

    if(BUILD_TESTING)\nadd_ros_test(\ntest/lanelet2_map_loader_launch.test.py\nTIMEOUT \"30\"\n)\ninstall(DIRECTORY\ntest/data/\nDESTINATION share/${PROJECT_NAME}/test/data/\n)\nendif()\n

    In addition to the command add_ros_test, we also install any data that is required by the test using the install command.

    Note

    To create a test, either read the launch_testing quick-start example, or follow the steps below.

    Taking test/lanelet2_map_loader_launch.test.py as an example, first dependencies are imported:

    import os\nimport unittest\n\nfrom ament_index_python import get_package_share_directory\nimport launch\nfrom launch import LaunchDescription\nfrom launch_ros.actions import Node\nimport launch_testing\nimport pytest\n

    Then a launch description is created to launch the node under test. Note that the test_map.osm file path is found and passed to the node, something that cannot be done with the smoke testing API:

    @pytest.mark.launch_test\ndef generate_test_description():\n\n    lanelet2_map_path = os.path.join(\n        get_package_share_directory(\"map_loader\"), \"test/data/test_map.osm\"\n    )\n\n    lanelet2_map_loader = Node(\n        package=\"map_loader\",\n        executable=\"lanelet2_map_loader\",\n        parameters=[{\"lanelet2_map_path\": lanelet2_map_path}],\n    )\n\n    context = {}\n\n    return (\n        LaunchDescription(\n            [\n                lanelet2_map_loader,\n                # Start test after 1s - gives time for the map_loader to finish initialization\n                launch.actions.TimerAction(\n                    period=1.0, actions=[launch_testing.actions.ReadyToTest()]\n                ),\n            ]\n        ),\n        context,\n    )\n

    Note

    Finally, a test is executed after the node executable has been shut down (post_shutdown_test). Here we ensure that the node was launched without error and exited cleanly.

    @launch_testing.post_shutdown_test()\nclass TestProcessOutput(unittest.TestCase):\n    def test_exit_code(self, proc_info):\n        # Check that process exits with code 0: no error\n        launch_testing.asserts.assertExitCodes(proc_info)\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#running-the-test","title":"Running the test","text":"

    Continuing the example from above, first build your package:

    colcon build --packages-up-to map_loader\nsource install/setup.bash\n

    Then either execute the component test manually:

    ros2 test src/universe/autoware.universe/map/map_loader/test/lanelet2_map_loader_launch.test.py\n

    Or as part of testing the entire package:

    colcon test --packages-select map_loader\n

    Verify that the test is executed; e.g.

    $ colcon test-result --all --verbose\n...\nbuild/map_loader/test_results/map_loader/test_lanelet2_map_loader_launch.test.py.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#next-steps","title":"Next steps","text":"

    The simple test described in Integration test with a single node: component test can be extended in numerous directions, such as testing a node's output.

    "},{"location":"contributing/testing-guidelines/integration-testing/#testing-the-output-of-a-node","title":"Testing the output of a node","text":"

    To test while the node is running, create an active test by adding a subclass of Python's unittest.TestCase to *launch.test.py. Some boilerplate code is required to access output by creating a node and a subscription to a particular topic, e.g.

    import unittest\n\nclass TestRunningDataPublisher(unittest.TestCase):\n\n    @classmethod\n    def setUpClass(cls):\n        cls.context = Context()\n        rclpy.init(context=cls.context)\n        cls.node = rclpy.create_node(\"test_node\", context=cls.context)\n\n    @classmethod\n    def tearDownClass(cls):\n        rclpy.shutdown(context=cls.context)\n\n    def setUp(self):\n        self.msgs = []\n        sub = self.node.create_subscription(\n            msg_type=my_msg_type,\n            topic=\"/info_test\",\n            callback=self._msg_received\n        )\n        self.addCleanup(self.node.destroy_subscription, sub)\n\n    def _msg_received(self, msg):\n        # Callback for ROS 2 subscriber used in the test\n        self.msgs.append(msg)\n\n    def get_message(self):\n        startlen = len(self.msgs)\n\n        executor = rclpy.executors.SingleThreadedExecutor(context=self.context)\n        executor.add_node(self.node)\n\n        try:\n            # Try up to 60 s to receive messages\n            end_time = time.time() + 60.0\n            while time.time() < end_time:\n                executor.spin_once(timeout_sec=0.1)\n                if startlen != len(self.msgs):\n                    break\n\n            self.assertNotEqual(startlen, len(self.msgs))\n            return self.msgs[-1]\n        finally:\n            executor.remove_node(self.node)\n\n    def test_message_content():\n        msg = self.get_message()\n        self.assertEqual(msg, \"Hello, world\")\n
    "},{"location":"contributing/testing-guidelines/integration-testing/#references","title":"References","text":""},{"location":"contributing/testing-guidelines/unit-testing/","title":"Unit testing","text":""},{"location":"contributing/testing-guidelines/unit-testing/#unit-testing","title":"Unit testing","text":"

    Unit testing is the first phase of testing and is used to validate units of source code such as classes and functions. Typically, a unit of code is tested by validating its output for various inputs. Unit testing helps ensure that the code behaves as intended and prevents accidental changes of behavior.

    Autoware uses the ament_cmake framework to build and run tests. The same framework is also used to analyze the test results.

    ament_cmake provides several convenience functions to make it easy to register tests in a CMake-based package and to ensure that JUnit-compatible result files are generated. It currently supports a few different testing frameworks like pytest, gtest, and gmock.

    In order to prevent tests running in parallel from interfering with each other when publishing and subscribing to ROS topics, it is recommended to use commands from ament_cmake_ros to run tests in isolation.

    See below for an example of using ament_add_ros_isolated_gtest with colcon test. All other tests follow a similar pattern.

    "},{"location":"contributing/testing-guidelines/unit-testing/#create-a-unit-test-with-gtest","title":"Create a unit test with gtest","text":"

    In my_cool_pkg/test, create the gtest code file test_my_cool_pkg.cpp:

    #include \"gtest/gtest.h\"\n#include \"my_cool_pkg/my_cool_pkg.hpp\"\nTEST(TestMyCoolPkg, TestHello) {\nEXPECT_EQ(my_cool_pkg::print_hello(), 0);\n}\n

    In package.xml, add the following line:

    <test_depend>ament_cmake_ros</test_depend>\n

    Next add an entry under BUILD_TESTING in the CMakeLists.txt to compile the test source files:

    if(BUILD_TESTING)\n\nament_add_ros_isolated_gtest(test_my_cool_pkg test/test_my_cool_pkg.cpp)\ntarget_link_libraries(test_my_cool_pkg ${PROJECT_NAME})\ntarget_include_directories(test_my_cool_pkg PRIVATE src)  # For private headers.\n...\nendif()\n

    This automatically links the test with the default main function provided by gtest. The code under test is usually in a different CMake target (${PROJECT_NAME} in the example) and its shared object for linking needs to be added. If the test source files include private headers from the src directory, the directory needs to be added to the include path using target_include_directories() function.

    To register a new gtest item, wrap the test code with the macro TEST (). TEST () is a predefined macro that helps generate the final test code, and also registers a gtest item to be available for execution. The test case name should be in CamelCase, since gtest inserts an underscore between the fixture name and the test case name when creating the test executable.

    gtest/gtest.h also contains predefined macros of gtest like ASSERT_TRUE(condition), ASSERT_FALSE(condition), ASSERT_EQ(val1,val2), ASSERT_STREQ(str1,str2), EXPECT_EQ(), etc. ASSERT_* will abort the test if the condition is not satisfied, while EXPECT_* will mark the test as failed but continue on to the next test condition.

    Info

    More information about gtest and its features can be found in the gtest repo.

    In the demo CMakeLists.txt, ament_add_ros_isolated_gtest is a predefined macro in ament_cmake_ros that helps simplify adding gtest code. Details can be viewed in ament_add_gtest.cmake.

    "},{"location":"contributing/testing-guidelines/unit-testing/#build-test","title":"Build test","text":"

    By default, all necessary test files (ELF, CTestTestfile.cmake, etc.) are compiled by colcon:

    cd ~/workspace/\ncolcon build --packages-select my_cool_pkg\n

    Test files are generated under ~/workspace/build/my_cool_pkg.

    "},{"location":"contributing/testing-guidelines/unit-testing/#run-test","title":"Run test","text":"

    To run all tests for a specific package, call:

    $ colcon test --packages-select my_cool_pkg\n\nStarting >>> my_cool_pkg\nFinished <<< my_cool_pkg [7.80s]\n\nSummary: 1 package finished [9.27s]\n

    The test command output contains a brief report of all the test results.

    To get job-wise information of all executed tests, call:

    $ colcon test-result --all\n\nbuild/my_cool_pkg/test_results/my_cool_pkg/copyright.xunit.xml: 8 tests, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/cppcheck.xunit.xml: 6 tests, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/lint_cmake.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/my_cool_pkg_exe_integration_test.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml: 1 test, 0 errors, 0 failures, 0 skipped\nbuild/my_cool_pkg/test_results/my_cool_pkg/xmllint.xunit.xml: 1 test, 0 errors, 0 failures, 0 skipped\n\nSummary: 18 tests, 0 errors, 0 failures, 0 skipped\n

    Look in the ~/workspace/log/test_<date>/<package_name> directory for all the raw test commands, std_out, and std_err. There is also the ~/workspace/log/latest_*/ directory containing symbolic links to the most recent package-level build and test output.

    To print the tests' details while the tests are being run, use the --event-handlers console_cohesion+ option to print the details directly to the console:

    $ colcon test --event-handlers console_cohesion+ --packages-select my_cool_pkg\n\n...\ntest 1\n    Start 1: test_my_cool_pkg\n\n1: Test command: /usr/bin/python3 \"-u\" \"~/workspace/install/share/ament_cmake_test/cmake/run_test.py\" \"~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\" \"--package-name\" \"my_cool_pkg\" \"--output-file\" \"~/workspace/build/my_cool_pkg/ament_cmake_gtest/test_my_cool_pkg.txt\" \"--command\" \"~/workspace/build/my_cool_pkg/test_my_cool_pkg\" \"--gtest_output=xml:~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\"\n1: Test timeout computed to be: 60\n1: -- run_test.py: invoking following command in '~/workspace/src/my_cool_pkg':\n1:  - ~/workspace/build/my_cool_pkg/test_my_cool_pkg --gtest_output=xml:~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml\n1: [==========] Running 1 test from 1 test case.\n1: [----------] Global test environment set-up.\n1: [----------] 1 test from test_my_cool_pkg\n1: [ RUN      ] test_my_cool_pkg.test_hello\n1: Hello World\n1: [       OK ] test_my_cool_pkg.test_hello (0 ms)\n1: [----------] 1 test from test_my_cool_pkg (0 ms total)\n1:\n1: [----------] Global test environment tear-down\n1: [==========] 1 test from 1 test case ran. (0 ms total)\n1: [  PASSED  ] 1 test.\n1: -- run_test.py: return code 0\n1: -- run_test.py: inject classname prefix into gtest result file '~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml'\n1: -- run_test.py: verify result file '~/workspace/build/my_cool_pkg/test_results/my_cool_pkg/test_my_cool_pkg.gtest.xml'\n1/5 Test #1: test_my_cool_pkg ...................   
Passed    0.09 sec\n\n...\n\n100% tests passed, 0 tests failed out of 5\n\nLabel Time Summary:\ncopyright     =   0.49 sec*proc (1 test)\ncppcheck      =   0.20 sec*proc (1 test)\ngtest         =   0.05 sec*proc (1 test)\nlint_cmake    =   0.18 sec*proc (1 test)\nlinter        =   1.34 sec*proc (4 tests)\nxmllint       =   0.47 sec*proc (1 test)\n\nTotal Test time (real) =   7.91 sec\n...\n
    "},{"location":"contributing/testing-guidelines/unit-testing/#code-coverage","title":"Code coverage","text":"

    Loosely described, a code coverage metric is a measure of how much of the program code has been exercised (covered) during testing.

    In the Autoware repositories, Codecov is used to automatically calculate coverage of any open pull request.

    More details about the code coverage metrics can be found in the Codecov documentation.

    "},{"location":"datasets/","title":"Datasets","text":""},{"location":"datasets/#datasets","title":"Datasets","text":"

    Autoware partners provide datasets for testing and development. These datasets are available for download here.

    "},{"location":"datasets/#bus-odd-operational-design-domain-datasets","title":"Bus-ODD (Operational Design Domain) datasets","text":""},{"location":"datasets/#leo-drive-isuzu-sensor-data","title":"Leo Drive - ISUZU sensor data","text":"

    This dataset contains data from the Isuzu bus used in the Bus ODD project.

    The dataset contains data from the following sensors:

    It also contains /tf topic for static transformations between sensors.

    "},{"location":"datasets/#required-message-types","title":"Required message types","text":"

    The GNSS data is available in sensor_msgs/msg/NavSatFix message type.

    Additionally, the Applanix raw messages are included in the applanix_msgs/msg/NavigationPerformanceGsof50 and applanix_msgs/msg/NavigationSolutionGsof49 message types. In order to be able to play back these messages, you need to build and source the applanix_msgs package.

    # Create a workspace and clone the repository\nmkdir -p ~/applanix_ws/src && cd \"$_\"\ngit clone https://github.com/autowarefoundation/applanix.git\ncd ..\n\n# Build the workspace\ncolcon build --symlink-install --packages-select applanix_msgs\n\n# Source the workspace\nsource ~/applanix_ws/install/setup.bash\n\n# Now you can play back the messages\n

    Also make sure to source Autoware Universe workspace too.

    "},{"location":"datasets/#download-instructions","title":"Download instructions","text":"
    # Install awscli\n$ sudo apt update && sudo apt install awscli -y\n\n# This will download the entire dataset to the current directory.\n# (About 10.9GB of data)\n$ aws s3 sync s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/ ./2022-08-22_leo_drive_isuzu_bags  --no-sign-request\n\n# Optionally,\n# If you instead want to download a single bag file, you can get a list of the available files with following:\n$ aws s3 ls s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/ --no-sign-request\n   PRE all-sensors-bag1_compressed/\n   PRE all-sensors-bag2_compressed/\n   PRE all-sensors-bag3_compressed/\n   PRE all-sensors-bag4_compressed/\n   PRE all-sensors-bag5_compressed/\n   PRE all-sensors-bag6_compressed/\n   PRE driving_20_kmh_2022_06_10-16_01_55_compressed/\n   PRE driving_30_kmh_2022_06_10-15_47_42_compressed/\n\n# Then you can download a single bag file with the following:\naws s3 sync s3://autoware-files/collected_data/2022-08-22_leo_drive_isuzu_bags/all-sensors-bag1_compressed/ ./all-sensors-bag1_compressed  --no-sign-request\n
    "},{"location":"datasets/#autocoreai-lidar-ros-2-bag-file-and-pcap","title":"AutoCore.ai - lidar ROS 2 bag file and pcap","text":"

    This dataset contains pcap files and ROS 2 bag files from an Ouster OS1-64 lidar. The pcap files and ROS 2 bag files were recorded at the same time, with a slight difference in duration.

    Click here to download (~553MB)

    Reference Issue

    "},{"location":"design/","title":"Autoware's Design","text":""},{"location":"design/#autowares-design","title":"Autoware's Design","text":""},{"location":"design/#architecture","title":"Architecture","text":"

    Core and Universe.

    Autoware provides the runtimes and technology components by open-source software. The runtimes are based on the Robot Operating System (ROS). The technology components are provided by contributors, which include, but are not limited to:

    "},{"location":"design/#concern-assumption-and-limitation","title":"Concern, Assumption, and Limitation","text":"

    The downside of the microautonomy architecture is that the computational performance of end applications is sacrificed due to its data path overhead attributed to functional modularity. In other words, the trade-off characteristic of the microautonomy architecture exists between computational performance and functional modularity. This trade-off problem can be solved technically by introducing real-time capability. This is because autonomous driving systems are not really designed to be real-fast, that is, low-latency computing is nice-to-have but not must-have. The must-have feature for autonomous driving systems is that the latency of computing is predictable, that is, the systems are real-time. As a whole, we can compromise computational performance to an extent that is predictable enough to meet the given timing constraints of autonomous driving systems, often referred to as deadlines of computation.

    "},{"location":"design/#design","title":"Design","text":"

    Warning

    Under Construction

    "},{"location":"design/#autoware-concepts","title":"Autoware concepts","text":"

    The Autoware concepts page describes the design philosophy of Autoware. Readers (service providers and all Autoware users) will learn the basic concepts underlying Autoware development, such as microautonomy and the Core/Universe architecture.

    "},{"location":"design/#autoware-architecture","title":"Autoware architecture","text":"

    The Autoware architecture page describes an overview of each module that makes up Autoware. Readers (all Autoware users) will gain a high-level picture of how each module that composes Autoware works.

    "},{"location":"design/#autoware-interfaces","title":"Autoware interfaces","text":"

    The Autoware interfaces page describes in detail the interface of each module that makes up Autoware. Readers (intermediate developers) will learn how to add new functionality to Autoware and how to integrate their own modules with Autoware.

    "},{"location":"design/#configuration-management","title":"Configuration management","text":""},{"location":"design/#conclusion","title":"Conclusion","text":""},{"location":"design/autoware-architecture/","title":"Architecture overview","text":""},{"location":"design/autoware-architecture/#architecture-overview","title":"Architecture overview","text":"

    This page describes the architecture of Autoware.

    "},{"location":"design/autoware-architecture/#introduction","title":"Introduction","text":"

    The current Autoware is defined to be a layered architecture that clarifies each module's role and simplifies the interface between them. By doing so:

    Note that the initial focus of this architecture design was solely on driving capability, and so the following features were left as future work:

    "},{"location":"design/autoware-architecture/#high-level-architecture-design","title":"High-level architecture design","text":"

    Autoware's architecture consists of the following six stacks. Each linked page contains a more detailed set of requirements and use cases specific to that stack:

    "},{"location":"design/autoware-architecture/#node-diagram","title":"Node diagram","text":"

    A diagram showing Autoware's nodes in the default configuration can be found on the Node diagram page. Detailed documents for each node are available in the Autoware Universe docs.

    Note that Autoware configurations are scalable / selectable and will vary depending on the environment and required use cases.

    "},{"location":"design/autoware-architecture/#references","title":"References","text":""},{"location":"design/autoware-architecture/control/","title":"Control component design","text":""},{"location":"design/autoware-architecture/control/#control-component-design","title":"Control component design","text":""},{"location":"design/autoware-architecture/control/#abstract","title":"Abstract","text":"

    This document presents the design concept of the Control Component. The content is as follows:

    "},{"location":"design/autoware-architecture/control/#autoware-control-design","title":"Autoware Control Design","text":"

    The Control Component generates the control signal to which the Vehicle Component subscribes. The generated control signals are computed based on the reference trajectories from the Planning Component.

    The Control Component consists of two modules. The trajectory_follower module generates a vehicle control command to follow the reference trajectory received from the planning module. The command includes, for example, the desired steering angle and target speed. The vehicle_command_gate is responsible for filtering the control command to prevent abnormal values and then sending it to the vehicle. This gate also allows switching between multiple sources such as the MRM (minimal risk maneuver) module or some remote control module, in addition to the trajectory follower.

    The Autoware control system is designed as a platform for automated driving systems that can be compatible with a diverse range of vehicles.

    The control process in Autoware uses general information (such as target acceleration and deceleration) and no vehicle-specific information (such as brake pressure) is used. Hence it can be adjusted independently of the vehicle's drive interface enabling easy integration or performance tuning.

    Furthermore, significant differences that affect vehicle motion constraints, such as two-wheel steering or four-wheel steering, are addressed by switching the control vehicle model, achieving control specialized for each characteristic.

    Autoware's control module outputs the necessary information to control the vehicle as a substitute for a human driver. For example, the control command from the control module looks like the following:

    - Target steering angle\n- Target steering torque\n- Target speed\n- Target acceleration\n

    Note that vehicle-specific values such as pedal positions and low-level information such as individual wheel rotation speeds are excluded from the command.

    "},{"location":"design/autoware-architecture/control/#vehicle-adaptation-design","title":"Vehicle Adaptation Design","text":""},{"location":"design/autoware-architecture/control/#vehicle-interface-adapter","title":"Vehicle interface adapter","text":"

    Autoware is designed to be an autonomous driving platform able to accommodate vehicles with various drivetrain types.

    This is an explanation of how Autoware handles the standardization of systems with different vehicle drivetrain. The interfaces for vehicle drivetrain are diverse, including steering angle, steering angular velocity, steering torque, speed, accel/brake pedals, and brake pressure. To accommodate these differences, Autoware adds an adapter module between the control component and the vehicle interface. This module performs the conversion between the proprietary message types used by the vehicle (such as brake pressure) and the generic types used by Autoware (such as desired acceleration). By providing this conversion information, the differences in vehicle drivetrain can be accommodated.

    If the information is not known in advance, an automatic calibration tool can be used. Calibration will occur within limited degrees of freedom, generating the information necessary for the drivetrain conversion automatically.

    This configuration is summarized in the following diagram.

    "},{"location":"design/autoware-architecture/control/#examples-of-several-vehicle-interfaces","title":"Examples of several vehicle interfaces","text":"

    This is an example of the several drivetrain types in the vehicle interface.

    Vehicle Lateral interface Longitudinal interface Note Lexus Steering angle Accel/brake pedal position Acceleration lookup table conversion for longitudinal JPN TAXI Steering angle Accel/brake pedal position Acceleration lookup table conversion for longitudinal GSM8 Steering EPS voltage Acceleration motor voltage, Deceleration brake hydraulic pressure lookup table and PID conversion for lateral and longitudinal YMC Golfcart Steering angle Velocity Logiee yaw rate Velocity F1 TENTH Steering angle Motor RPM interface code"},{"location":"design/autoware-architecture/control/#control-feature-design","title":"Control Feature Design","text":"

    The following lists the features provided by Autoware's Control/Vehicle component, as well as the conditions and assumptions required to utilize them effectively.

    The proper operation of the ODD is limited by factors such as whether the functions are enabled, delay time, calibration accuracy and degradation rate, and sensor accuracy.

    Feature Description\u3000 Requirements/Assumptions Note \u3000Limitation for now Lateral Control Control the drivetrain system related to lateral vehicle motion Trying to increase the number of vehicle types that can be supported in the future. Only front-steering type is supported. Longitudinal Control Control the drivetrain system related to longitudinal vehicle motion Slope Compensation Supports precise vehicle motion control on slopes Gradient information can be obtained from maps or sensors attached to the chassis If gradient information is not available, the gradient is estimated from the vehicle's pitch angle. Delay Compensation Controls the drivetrain system appropriately in the presence of time delays The drivetrain delay information is provided in advance If there is no delay information, the drivetrain delay is estimated automatically (automatic calibration). However, the effect of delay cannot be completely eliminated, especially in scenarios with sudden changes in speed. Only fixed delay times can be set for longitudinal and lateral drivetrain systems separately. It does not accommodate different delay times for the accelerator and brake. Drivetrain IF Conversion (Lateral Control) Converts the drivetrain-specific information of the vehicle into the drivetrain information used by Autoware (e.g., target steering angular velocity \u2192 steering torque) The conversion information is provided in advance If there is no conversion information, the conversion map is estimated automatically (automatic calibration). The degree of freedom for conversion is limited (2D lookup table + PID FB). 
Drivetrain IF Conversion (Longitudinal Control) Converts the drivetrain-specific information of the vehicle into the drivetrain information used by Autoware (e.g., target acceleration \u2192 accelerator/brake pedal value) The conversion information is provided in advance If there is no conversion information, the conversion map is estimated automatically (automatic calibration). The degree of freedom for conversion is limited (2D lookup table + PID FB). Automatic Calibration Automatically estimates and applies values such as drivetrain IF conversion map and delay time. The drivetrain status can be obtained (must) Anomaly Detection Notifies when there is a discrepancy in the calibration or unexpected drivetrain behavior The drivetrain status can be obtained (must) Steering Zero Point Correction Corrects the midpoint of the steering to achieve appropriate steering control The drivetrain status can be obtained (must) Steering Deadzone Correction Corrects the deadzone of the steering to achieve appropriate steering control The steering deadzone parameter is provided in advance If the parameter is unknown, the deadzone parameter is estimated from driving information Not available now Steering Deadzone Estimation Dynamically estimates the steering deadzone from driving data Not available now Weight Compensation Performs appropriate vehicle control according to weight Weight information can be obtained from sensors If there is no weight sensor, estimate the weight from driving information. Currently not available Weight Estimation Dynamically estimates weight from driving data Currently not available

    The list above does not cover wheel control systems such as ABS commonly used in vehicles. Regarding these features, the following considerations are taken into account.

    "},{"location":"design/autoware-architecture/control/#integration-with-vehicle-side-functions","title":"Integration with vehicle-side functions","text":"

    ABS (Anti-lock Brake System) and ESC (Electronic Stability Control) are two functions that may be pre-installed on a vehicle, directly impacting its controllability. The control modules of Autoware assume that both ABS and ESC are installed on the vehicle and their absence may cause unreliable controls depending on the target ODD. For example, with low-velocity driving in a controlled environment, these functions are not necessary.

    Also, note that this statement does not negate the development of ABS functionality in autonomous driving systems.

    "},{"location":"design/autoware-architecture/control/#autoware-capabilities-and-vehicle-requirements","title":"Autoware Capabilities and Vehicle Requirements","text":"

    As an alternative to human driving, autonomous driving systems essentially aim to handle tasks that humans can perform. This includes not only controlling the steering wheel, accel, and brake, but also automatically detecting issues such as poor brake response or a misaligned steering angle. However, this is a trade-off, as better vehicle performance will lead to superior system behavior, ultimately affecting the design of ODD.

    On the other hand, for tasks that are not typically anticipated or cannot be handled by a human driver, processing in the vehicle ECU is expected. Examples of such scenarios include cases where the brake response is clearly delayed or when the vehicle rotates due to a single-side tire slipping. These tasks are typically handled by ABS or ESC.

    "},{"location":"design/autoware-architecture/localization/","title":"Index","text":"

    LOCALIZATION COMPONENT DESIGN DOC

    "},{"location":"design/autoware-architecture/localization/#abstract","title":"Abstract","text":""},{"location":"design/autoware-architecture/localization/#1-requirements","title":"1. Requirements","text":"

    Localization aims to estimate vehicle pose, velocity, and acceleration.

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/localization/#2-sensor-configuration-examples","title":"2. Sensor Configuration Examples","text":"

    This section shows example sensor configurations and their expected performances. Each sensor has its own advantages and disadvantages, but overall performance can be improved by fusing multiple sensors.

    "},{"location":"design/autoware-architecture/localization/#3d-lidar-pointcloud-map","title":"3D-LiDAR + PointCloud Map","text":""},{"location":"design/autoware-architecture/localization/#expected-situation","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#3d-lidar-or-camera-vector-map","title":"3D-LiDAR or Camera + Vector Map","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_1","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_1","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionalities","title":"Functionalities","text":""},{"location":"design/autoware-architecture/localization/#gnss","title":"GNSS","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_2","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situation-that-can-make-the-system-unstable","title":"Situation that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_1","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#camera-visual-odometry-visual-slam","title":"Camera (Visual Odometry, Visual SLAM)","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_3","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_2","title":"Situations that can make the system 
unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_2","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#wheel-speed-sensor","title":"Wheel speed sensor","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_4","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_3","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_3","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#imu","title":"IMU","text":""},{"location":"design/autoware-architecture/localization/#expected-environments","title":"Expected environments","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_4","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_4","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#geomagnetic-sensor","title":"Geomagnetic sensor","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_5","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-that-can-make-the-system-unstable_5","title":"Situations that can make the system unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_5","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#magnetic-markers","title":"Magnetic markers","text":""},{"location":"design/autoware-architecture/localization/#expected-situation_6","title":"Expected situation","text":""},{"location":"design/autoware-architecture/localization/#situations-where-the-system-becomes-unstable","title":"Situations where 
the system becomes unstable","text":""},{"location":"design/autoware-architecture/localization/#functionality_6","title":"Functionality","text":""},{"location":"design/autoware-architecture/localization/#3-requirements","title":"3. Requirements","text":""},{"location":"design/autoware-architecture/localization/#4-architecture","title":"4. Architecture","text":""},{"location":"design/autoware-architecture/localization/#abstract_1","title":"Abstract","text":"

    Two architectures are defined, \"Required\" and \"Recommended\". However, the \"Required\" architecture only contains the inputs and outputs necessary to accept various localization algorithms. To improve the reusability of each module, the required components are defined in the \"Recommended\" architecture section along with a more detailed explanation.

    "},{"location":"design/autoware-architecture/localization/#required-architecture","title":"Required Architecture","text":""},{"location":"design/autoware-architecture/localization/#input","title":"Input","text":""},{"location":"design/autoware-architecture/localization/#output","title":"Output","text":""},{"location":"design/autoware-architecture/localization/#recommended-architecture","title":"Recommended Architecture","text":""},{"location":"design/autoware-architecture/localization/#pose-estimator","title":"Pose Estimator","text":""},{"location":"design/autoware-architecture/localization/#twist-accel-estimator","title":"Twist-Accel Estimator","text":""},{"location":"design/autoware-architecture/localization/#kinematics-fusion-filter","title":"Kinematics Fusion Filter","text":""},{"location":"design/autoware-architecture/localization/#localization-diagnostics","title":"Localization Diagnostics","text":""},{"location":"design/autoware-architecture/localization/#tf-tree","title":"TF tree","text":"frame meaning earth ECEF (Earth Centered Earth Fixed) map Origin of the map coordinate (ex. MGRS origin) viewer User-defined frame for rviz base_link Reference pose of the ego-vehicle (projection of the rear-axle center onto the ground surface) sensor Reference pose of each sensor

    Developers can optionally add other frames such as odom or base_footprint as long as the tf structure above is maintained.

    "},{"location":"design/autoware-architecture/localization/#the-localization-modules-ideal-functionality","title":"The localization module's ideal functionality","text":""},{"location":"design/autoware-architecture/localization/#kpi","title":"KPI","text":"

    To maintain sufficient pose estimation performance for safe operation, the following metrics are considered:

    "},{"location":"design/autoware-architecture/localization/#5-interface-and-data-structure","title":"5. Interface and Data Structure","text":""},{"location":"design/autoware-architecture/localization/#6-concerns-assumptions-and-limitations","title":"6. Concerns, Assumptions, and Limitations","text":""},{"location":"design/autoware-architecture/localization/#prerequisites-of-sensors-and-inputs","title":"Prerequisites of sensors and inputs","text":""},{"location":"design/autoware-architecture/localization/#sensor-prerequisites","title":"Sensor prerequisites","text":""},{"location":"design/autoware-architecture/localization/#map-prerequisites","title":"Map prerequisites","text":""},{"location":"design/autoware-architecture/localization/#computational-resources","title":"Computational resources","text":"
    1. For more details about bias, refer to the VectorNav IMU specifications page.\u00a0\u21a9

    "},{"location":"design/autoware-architecture/map/","title":"Map component design","text":""},{"location":"design/autoware-architecture/map/#map-component-design","title":"Map component design","text":""},{"location":"design/autoware-architecture/map/#1-overview","title":"1. Overview","text":"

    Autoware relies on high-definition point cloud maps and vector maps of the driving environment to perform various tasks such as localization, route planning, traffic light detection, and predicting the trajectories of pedestrians and other vehicles.

    This document describes the design of map component of Autoware, including its requirements, architecture design, features, data formats, and interface to distribute map information to the rest of autonomous driving stack.

    "},{"location":"design/autoware-architecture/map/#2-requirements","title":"2. Requirements","text":"

    Map should provide two types of information to the rest of the stack:

    A vector map contains highly accurate information about a road network, lane geometry, and traffic lights. It is required for route planning, traffic light detection, and predicting the trajectories of other vehicles and pedestrians.

    A 3D point cloud map is primarily used for LiDAR-based localization and part of perception in Autoware. In order to determine the current position and orientation of the vehicle, a live scan captured from one or more LiDAR units is matched against a pre-generated 3D point cloud map. Therefore, an accurate point cloud map is crucial for good localization results. However, if the vehicle has an alternate localization method with enough accuracy, for example using camera-based localization, point cloud map may not be required to use Autoware.

    In addition to above two types of maps, Autoware also requires a supplemental file for specifying the coordinate system of the map in geodetic system.

    "},{"location":"design/autoware-architecture/map/#3-architecture","title":"3. Architecture","text":"

    This diagram describes the high-level architecture of Map component in Autoware.

    The Map component consists of the following sub-components:

    "},{"location":"design/autoware-architecture/map/#4-component-interface","title":"4. Component interface","text":""},{"location":"design/autoware-architecture/map/#input-to-the-map-component","title":"Input to the map component","text":""},{"location":"design/autoware-architecture/map/#output-from-the-map-component","title":"Output from the map component","text":""},{"location":"design/autoware-architecture/map/#5-map-specification","title":"5. Map Specification","text":""},{"location":"design/autoware-architecture/map/#point-cloud-map","title":"Point Cloud Map","text":"

    The point cloud map must be supplied as a file with the following requirements:

    For more details on divided map format, please refer to the readme of map_loader in Autoware Universe.

    Note

    Three global coordinate systems are currently supported by Autoware, including Military Grid Reference System (MGRS), Universal Transverse Mercator (UTM), and Japan Rectangular Coordinate System. However, MGRS is a preferred coordinate system for georeferenced maps. In a map with MGRS coordinate system, the X and Y coordinates of each point represent the point's location within the 100,000-meter square, while the Z coordinate represents the point's elevation.

    "},{"location":"design/autoware-architecture/map/#vector-map","title":"Vector Map","text":"

    The vector map must be supplied as a file with the following requirements:

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/map/#projection-information","title":"Projection Information","text":"

    The projection information must be supplied as a file with the following requirements:

    For further information, please refer to the readme of map_projection_loader in Autoware Universe.

    "},{"location":"design/autoware-architecture/node-diagram/","title":"Node diagram","text":""},{"location":"design/autoware-architecture/node-diagram/#node-diagram","title":"Node diagram","text":"

    This page depicts the node diagram designs for Autoware Core/Universe architecture.

    "},{"location":"design/autoware-architecture/node-diagram/#autoware-core","title":"Autoware Core","text":"

    TBD.

    "},{"location":"design/autoware-architecture/node-diagram/#autoware-universe","title":"Autoware Universe","text":"

    Open in draw.io for fullscreen

    Note that the diagram is for reference. We are planning to update this diagram every release and may have old information between the releases. If you wish to check the latest node diagram use rqt_graph after launching the Autoware.

    "},{"location":"design/autoware-architecture/perception/","title":"Perception component design","text":""},{"location":"design/autoware-architecture/perception/#perception-component-design","title":"Perception component design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/planning/","title":"Planning component design","text":""},{"location":"design/autoware-architecture/planning/#planning-component-design","title":"Planning component design","text":""},{"location":"design/autoware-architecture/planning/#overview","title":"Overview","text":"

    The Planning component generates the trajectory message that will be subscribed to by the Control component based on the environmental state obtained from the Localization and the Perception components.

    "},{"location":"design/autoware-architecture/planning/#requirements","title":"Requirements","text":"

    The goal of the Planning component is to generate a trajectory (path and velocity) of the ego vehicle that is safe and well-regulated while satisfying the given mission.

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/planning/#high-level-architecture","title":"High-level architecture","text":"

    This diagram describes the high-level architecture of the Planning Component.

    The Planning component consists of the following sub-components:

    Each component contains some modules that can be dynamically loaded and unloaded based on the situation. For instance, the Behavior Planning component includes modules such as lane change, intersection, and crosswalk modules.

    Our planning components are built based on the microautonomy architecture with Autoware. We adopt a modular system framework where the tasks are implemented as modules that can be dynamically loaded and unloaded to achieve different features depending on the given use cases.

    "},{"location":"design/autoware-architecture/planning/#component-interface","title":"Component interface","text":"

    This section describes the inputs and outputs of the Planning Component and of its internal modules. See the Planning Component Interface page for the current implementation.

    "},{"location":"design/autoware-architecture/planning/#input-to-the-planning-component","title":"Input to the planning component","text":""},{"location":"design/autoware-architecture/planning/#output-from-the-planning-component","title":"Output from the planning component","text":""},{"location":"design/autoware-architecture/planning/#internal-interface-in-the-planning-component","title":"Internal interface in the planning component","text":""},{"location":"design/autoware-architecture/planning/#how-to-add-new-modules-wip","title":"How to add new modules (WIP)","text":"

    As mentioned in the goal section, this planning module is designed to be extensible by third-party components. For specific instructions on how to add new modules and expand its functionality, please refer to the provided documentation or guidelines (WIP).

    "},{"location":"design/autoware-architecture/planning/#supported-functions","title":"Supported Functions","text":"Feature Description Requirements Figure Route Planning Plan route from the ego vehicle position to the destination. Reference implementation is in Mission Planner, enabled by launching the mission_planner node. - Lanelet map (driving lanelets) Path Planning from Route Plan path to be followed from the given route. Reference implementation is in Behavior Path Planner. - Lanelet map (driving lanelets) Obstacle Avoidance Plan path to avoid obstacles by steering operation. Reference implementation is in Avoidance, Obstacle Avoidance Planner. Enable flag in parameter: launch obstacle_avoidance_planner true Demonstration Video - objects information Path Smoothing Plan path to achieve smooth steering. Reference implementation is in Obstacle Avoidance Planner. Demonstration Video - Lanelet map (driving lanelet) Narrow Space Driving Plan path to drive within the drivable area. Furthermore, when it is not possible to drive within the drivable area, stop the vehicle to avoid exiting the drivable area. Reference implementation is in Obstacle Avoidance Planner. Demonstration Video - Lanelet map (high-precision lane boundaries) Lane Change Plan path for lane change to reach the destination. Reference implementation is in Lane Change. Demonstration Video - Lanelet map (driving lanelets) Pull Over Plan path for pull over to park at the road shoulder. Reference implementation is in Goal Planner. Demonstration Videos: Simple Pull Over Arc Forward Pull Over Arc Backward Pull Over - Lanelet map (shoulder lane) Pull Out Plan path for pull over to start from the road shoulder. Reference implementation is in Pull Out Module. Demonstration Video: Simple Pull Out Backward Pull Out - Lanelet map (shoulder lane) Path Shift Plan path in lateral direction in response to external instructions. Reference implementation is in Side Shift Module. 
- None Obstacle Stop Plan velocity to stop for an obstacle on the path. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. launch obstacle_stop_planner and enable flag: TODO, launch obstacle_cruise_planner and enable flag: TODO Demonstration Video - objects information Obstacle Deceleration Plan velocity to decelerate for an obstacle located around the path. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. Demonstration Video - objects information Adaptive Cruise Control Plan velocity to follow the vehicle driving in front of the ego vehicle. Reference implementation is in Obstacle Stop Planner, Obstacle Cruise Planner. - objects information Decelerate for cut-in vehicles Plan velocity to avoid a risk for cutting-in vehicle to ego lane. Reference implementation is in Obstacle Cruise Planner. - objects information Surround Check at starting Plan velocity to prevent moving when an obstacle exists around the vehicle. Reference implementation is in Surround Obstacle Checker. Enable flag in parameter: use_surround_obstacle_check true in tier4_planning_component.launch.xml Demonstration Video - objects information Curve Deceleration Plan velocity to decelerate the speed on a curve. Reference implementation is in Motion Velocity Smoother. - None Curve Deceleration for Obstacle Plan velocity to decelerate the speed on a curve for a risk of obstacle collision around the path. Reference implementation is in Obstacle Velocity Limiter. Demonstration Video - objects information - Lanelet map (static obstacle) Crosswalk Plan velocity to stop or decelerate for pedestrians approaching or walking on a crosswalk. Reference implementation is in Crosswalk Module. Demonstration Video - objects information - Lanelet map (pedestrian crossing) Intersection Oncoming Vehicle Check Plan velocity for turning right/left at intersection to avoid a risk with oncoming other vehicles. Reference implementation is in Intersection Module. 
Demonstration Video - objects information - Lanelet map (intersection lane and yield lane) Intersection Blind Spot Check Plan velocity for turning right/left at intersection to avoid a risk with other vehicles or motorcycles coming from behind blind spot. Reference implementation is in Blind Spot Module. Demonstration Video - objects information - Lanelet map (intersection lane) Intersection Occlusion Check Plan velocity for turning right/left at intersection to avoid a risk with the possibility of coming vehicles from occlusion area. Reference implementation is in Intersection Module. Demonstration Video - objects information - Lanelet map (intersection lane) Intersection Traffic Jam Detection Plan velocity for intersection not to enter the intersection when a vehicle is stopped ahead for a traffic jam. Reference implementation is in Intersection Module. Demonstration Video - objects information - Lanelet map (intersection lane) Traffic Light Plan velocity for intersection according to a traffic light signal. Reference implementation is in Traffic Light Module. Demonstration Video - Traffic light color information Run-out Check Plan velocity to decelerate for the possibility of nearby objects running out into the path. Reference implementation is in Run Out Module. Demonstration Video - objects information Stop Line Plan velocity to stop at a stop line. Reference implementation is in Stop Line Module. Demonstration Video - Lanelet map (stop line) Occlusion Spot Check Plan velocity to decelerate for objects running out from occlusion area, for example, from behind a large vehicle. Reference implementation is in Occlusion Spot Module. Demonstration Video - objects information - Lanelet map (private/public lane) No Stop Area Plan velocity not to stop in areas where stopping is prohibited, such as in front of the fire station entrance. Reference implementation is in No Stopping Area Module. 
    - Lanelet map (no stopping area) Merge from Private Area to Public Road Plan velocity for entering the public road from a private driveway to avoid a risk of collision with pedestrians or other vehicles. Reference implementation is in Merge from Private Area Module. - objects information - Lanelet map (private/public lane) WIP Speed Bump Plan velocity to decelerate for speed bumps. Reference implementation is in Speed Bump Module. Demonstration Video - Lanelet map (speed bump) Detection Area Plan velocity to stop at the corresponding stop when an object exists in the designated detection area. Reference implementation is in Detection Area Module. Demonstration Video - Lanelet map (detection area) No Drivable Lane Plan velocity to stop before exiting the area designated by ODD (Operational Design Domain) or stop the vehicle if autonomous mode started in out of ODD lane. Reference implementation is in No Drivable Lane Module. - Lanelet map (no drivable lane) Collision Detection when deviating from lane Plan velocity to avoid conflict with other vehicles driving in another lane when the ego vehicle is deviating from own lane. Reference implementation is in Out of Lane Module. - objects information - Lanelet map (driving lane) WIP Parking Plan path and velocity for given goal in parking area. Reference implementation is in Free Space Planner. Demonstration Video - objects information - Lanelet map (parking area) Autonomous Emergency Braking (AEB) Perform an emergency stop if a collision with an object ahead is anticipated. It is noted that this function is expected as a final safety layer, and this should work even in the event of failures in the Localization or Perception system. Reference implementation is in Out of Lane Module. - Primitive objects Minimum Risk Maneuver (MRM) Provide appropriate MRM (Minimum Risk Maneuver) instructions when a hazardous event occurs. 
    For example, when a sensor trouble is found, send an instruction for emergency braking, moderate stop, or pulling over to the shoulder, depending on the severity of the situation. Reference implementation is in TODO - TODO WIP Trajectory Validation Check that the planned trajectory is safe. If it is unsafe, take appropriate action, such as modify the trajectory, stop sending the trajectory or report to the autonomous driving system. Reference implementation is in Planning Validator. - None Running Lane Map Generation Generate lane map from localization data recorded in manual driving. Reference implementation is in WIP - None WIP Running Lane Optimization Optimize the centerline (reference path) of the map to make it smooth considering the vehicle kinematics. Reference implementation is in Static Centerline Optimizer. - Lanelet map (driving lanes) WIP"},{"location":"design/autoware-architecture/planning/#reference-implementation","title":"Reference Implementation","text":"

    The following diagram describes the reference implementation of the Planning component. By adding new modules or extending the functionalities, various ODDs can be supported.

    Note that some implementation does not adhere to the high-level architecture design and require updating.

    For more details, please refer to the design documents in each package.

    "},{"location":"design/autoware-architecture/planning/#important-parameters","title":"Important Parameters","text":"Package Parameter Type Description obstacle_stop_planner stop_planner.stop_position.max_longitudinal_margin double distance between the ego and the front vehicle when stopping (when cruise_planner_type:=obstacle_stop_planner) obstacle_cruise_planner common.safe_distance_margin double distance between the ego and the front vehicle when stopping (when cruise_planner_type:=obstacle_cruise_planner) behavior_path_planner avoidance.avoidance.lateral.lateral_collision_margin double minimum lateral margin to obstacle on avoidance behavior_path_planner avoidance.avoidance.lateral.lateral_collision_safety_buffer double additional lateral margin to obstacle if possible on avoidance obstacle_avoidance_planner option.enable_outside_drivable_area_stop bool If set true, a stop point will be inserted before the path footprint is outside the drivable area."},{"location":"design/autoware-architecture/planning/#notation","title":"Notation","text":""},{"location":"design/autoware-architecture/planning/#1-self-crossing-road-and-overlapped","title":"[1] self-crossing road and overlapped","text":"

    To support the self-crossing road and overlapped road in the opposite direction, each planning module has to meet the specifications

    Currently, the supported modules are as follows.

    "},{"location":"design/autoware-architecture/planning/#2-size-of-path-points","title":"[2] Size of Path Points","text":"

    Some functions do not support paths with only one point. Therefore, each module should generate the path with more than two path points.

    "},{"location":"design/autoware-architecture/sensing/","title":"Sensing component design","text":""},{"location":"design/autoware-architecture/sensing/#sensing-component-design","title":"Sensing component design","text":""},{"location":"design/autoware-architecture/sensing/#overview","title":"Overview","text":"

    Sensing component is a collection of modules that apply some primitive pre-processing to the raw sensor data.

    The sensor input formats are defined in this component.

    "},{"location":"design/autoware-architecture/sensing/#role","title":"Role","text":""},{"location":"design/autoware-architecture/sensing/#inputs","title":"Inputs","text":""},{"location":"design/autoware-architecture/sensing/#input-types","title":"Input types","text":"Sensor Data Message Type Point cloud (Lidars, depth cameras, etc.) sensor_msgs/msg/PointCloud2.msg Image (RGB, monochrome, depth, etc. cameras) sensor_msgs/msg/Image.msg Radar scan radar_msgs/msg/RadarScan.msg Radar tracks radar_msgs/msg/RadarTracks.msg GNSS-INS position sensor_msgs/msg/NavSatFix.msg GNSS-INS orientation autoware_sensing_msgs/GnssInsOrientationStamped.msg GNSS-INS velocity geometry_msgs/msg/TwistWithCovarianceStamped.msg GNSS-INS acceleration geometry_msgs/msg/AccelWithCovarianceStamped.msg Ultrasonics sensor_msgs/msg/Range.msg"},{"location":"design/autoware-architecture/sensing/#design-by-data-types","title":"Design by data-types","text":""},{"location":"design/autoware-architecture/sensing/data-types/gnss-ins-data/","title":"GNSS/INS data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/gnss-ins-data/#gnssins-data-pre-processing-design","title":"GNSS/INS data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/image/","title":"Image pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/image/#image-pre-processing-design","title":"Image pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/","title":"Point cloud pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-pre-processing-design","title":"Point cloud pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#overview","title":"Overview","text":"

    Point cloud pre-processing is a collection of modules that apply some primitive pre-processing to the raw sensor data.

    This pipeline covers the flow of data from drivers to the perception stack.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#recommended-processing-pipeline","title":"Recommended processing pipeline","text":"
    graph TD\n    Driver[\"Lidar Driver\"] -->|\"Cloud XYZIRCADT\"| FilterPR[\"Polygon Remover Filter / CropBox Filter\"]\n\n    subgraph \"sensing\"\n    FilterPR -->|\"Cloud XYZIRCADT\"| FilterDC[\"Motion Distortion Corrector Filter\"]\n    FilterDC -->|\"Cloud XYZIRCAD\"| FilterOF[\"Outlier Remover Filter\"]\n    FilterOF -->|\"Cloud XYZIRC\"| FilterDS[\"Downsampler Filter\"]\n    FilterDS -->|\"Cloud XYZIRC\"| FilterTrans[\"Cloud Transformer\"]\n    FilterTrans -->|\"Cloud XYZIRC\"| FilterC\n\n    FilterX[\"...\"] -->|\"Cloud XYZIRC (i)\"| FilterC[\"Cloud Concatenator\"]\n    end\n\n    FilterC -->|\"Cloud XYZIRC\"| SegGr[\"Ground Segmentation\"]
    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#list-of-modules","title":"List of modules","text":"

    The modules used here are from pointcloud_preprocessor package.

    For details about the modules, see the following table.

    It is recommended that these modules are used in a single container as components. For details see ROS 2 Composition

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-fields","title":"Point cloud fields","text":"

    In the ideal case, the driver is expected to output a point cloud with the PointXYZIRCADT point type.

    name datatype derived description X FLOAT32 false X position Y FLOAT32 false Y position Z FLOAT32 false Z position I (intensity) UINT8 false Measured reflectivity, intensity of the point R (return type) UINT8 false Laser return type for dual return lidars C (channel) UINT16 false Vertical channel id of the laser that measured the point A (azimuth) FLOAT32 true atan2(Y, X), Horizontal angle from the front of the lidar to the point D (distance) FLOAT32 true hypot(X, Y, Z), Euclidean distance of the point to lidar T (time) UINT32 false Nanoseconds passed since the time of the header when this point was measured

    Note

    A (azimuth) and D (distance) fields are derived fields. They are provided by the driver to reduce the computational load on some parts of the perception stack.

    Note

    If the Motion Distortion Corrector Filter won't be used, the T (time) field can be omitted, PointXYZIRCAD point type can be used.

    Warning

    Autoware will support conversion from PointXYZI to PointXYZIRC or PointXYZIRCAD (with channel and return set to 0) for prototyping purposes. However, this conversion is not recommended for production use since it's not efficient.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#intensity","title":"Intensity","text":"

    We will use the following ranges for intensity, compatible with the VLP16 User Manual:

    Quoting from the VLP-16 User Manual:

    For each laser measurement, a reflectivity byte is returned in addition to distance. Reflectivity byte values are segmented into two ranges, allowing software to distinguish diffuse reflectors (e.g. tree trunks, clothing) in the low range from retroreflectors (e.g. road signs, license plates) in the high range. A retroreflector reflects light back to its source with a minimum of scattering. The VLP-16 provides its own light, with negligible separation between transmitting laser and receiving detector, so retroreflecting surfaces pop with reflected IR light compared to diffuse reflectors that tend to scatter reflected energy.

    In a typical point cloud without retroreflectors, all intensity points will be between 0 and 100.

    Retroreflective Gradient road sign, Image Source

    But in a point cloud with retroreflectors, the intensity points will be between 0 and 255.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#intensity-mapping-for-other-lidar-brands","title":"Intensity mapping for other lidar brands","text":""},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#hesai-pandarxt16","title":"Hesai PandarXT16","text":"

    Hesai Pandar XT16 User Manual

    This lidar has 2 modes for reporting reflectivity:

    If you are using linear mapping mode, you should map from [0, 255] to [0, 100] when constructing the point cloud.

    If you are using non-linear mapping mode, you should map (hesai to autoware)

    when constructing the point cloud.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#livox-mid-70","title":"Livox Mid-70","text":"

    Livox Mid-70 User Manual

    This lidar has 2 modes for reporting reflectivity similar to Velodyne VLP-16, only the ranges are slightly different.

    You should map (livox to autoware)

    when constructing the point cloud.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#robosense-rs-lidar-16","title":"RoboSense RS-LiDAR-16","text":"

    RoboSense RS-LiDAR-16 User Manual

    No mapping required, same as Velodyne VLP-16.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#ouster-os-1-64","title":"Ouster OS-1-64","text":"

    Software User Manual v2.0.0 for all Ouster sensors

    In the manual it is stated:

    Reflectivity [16 bit unsigned int] - sensor Signal Photons measurements are scaled based on measured range and sensor sensitivity at that range, providing an indication of target reflectivity. Calibration of this measurement has not currently been rigorously implemented, but this will be updated in a future firmware release.

    So it is advised to map the 16 bit reflectivity to [0, 100] range.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#leishen-ch64w","title":"Leishen CH64W","text":"

    I couldn't get the English user manual; here is a link to the website

    In a user manual I was able to find, it says:

    Byte 7 represents echo strength, and the value range is 0-255. (Echo strength can reflect the energy reflection characteristics of the measured object in the actual measurement environment. Therefore, the echo strength can be used to distinguish objects with different reflection characteristics.)

    So it is advised to map the [0, 255] to [0, 100] range.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#return-type","title":"Return type","text":"

    Various lidars support multiple return modes. Velodyne lidars support Strongest and Last return modes.

    In the PointXYZIRCT and PointXYZIRC types, R field represents return mode with an UINT8.

    R (return type) Description 0 Unknown / Not Marked 1 Strongest 2 Last"},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#channel","title":"Channel","text":"

    The channel field is used to identify the vertical channel of the laser that measured the point. In various lidar manuals or literature, it can also be called laser id, ring, laser line.

    For Velodyne VLP-16, there are 16 channels. Default order of channels in drivers is generally in firing order.

    In the PointXYZIRCT and PointXYZIRC types, C field represents the vertical channel id with an UINT16.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#solid-state-and-petal-pattern-lidars","title":"Solid state and petal pattern lidars","text":"

    Warning

    This section is subject to change. Following are suggestions and open for discussion.

    For solid state lidars that have lines, assign row number as the channel id.

    For petal pattern lidars, you can keep channel 0.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#time","title":"Time","text":"

    In lidar point clouds, each point measurement can have its individual time stamp. This information can be used to eliminate the motion blur that is caused by the movement of the lidar during the scan.

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#point-cloud-header-time","title":"Point cloud header time","text":"

    The header contains a Time field. The time field has 2 components:

    Field Type Description sec int32 Unix time (seconds elapsed since January 1, 1970) nanosec uint32 Nanoseconds elapsed since the sec field

    The header of the point cloud message is expected to have the time of the earliest point it has.

    Note

    The sec field is int32 in ROS 2 humble. The largest value it can represent is 2^31 seconds, it is subject to year 2038 problems. We will wait for actions on ROS 2 community side.

    More info at: https://github.com/ros2/rcl_interfaces/issues/85

    "},{"location":"design/autoware-architecture/sensing/data-types/point-cloud/#individual-point-time","title":"Individual point time","text":"

    Each PointXYZIRCT point type has the T field for representing the nanoseconds passed since the first-shot point of the point cloud.

    To calculate the exact time each point was shot, the T nanoseconds are added to the header time.

    Note

    The T field is uint32 type. The largest value it can represent is 2^32 nanoseconds, which equates to roughly 4.29 seconds. Usual point clouds don't last more than 100ms for full cycle. So this field should be enough.

    "},{"location":"design/autoware-architecture/sensing/data-types/radar-data/","title":"Radar data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/radar-data/#radar-data-pre-processing-design","title":"Radar data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/sensing/data-types/ultrasonics-data/","title":"Ultrasonics data pre-processing design","text":""},{"location":"design/autoware-architecture/sensing/data-types/ultrasonics-data/#ultrasonics-data-pre-processing-design","title":"Ultrasonics data pre-processing design","text":"

    Warning

    Under Construction

    "},{"location":"design/autoware-architecture/vehicle/","title":"Vehicle Interface design","text":""},{"location":"design/autoware-architecture/vehicle/#vehicle-interface-design","title":"Vehicle Interface design","text":""},{"location":"design/autoware-architecture/vehicle/#abstract","title":"Abstract","text":"

    The Vehicle Interface component provides an interface between Autoware and a vehicle that passes control signals to the vehicle\u2019s drive-by-wire system and receives vehicle information that is passed back to Autoware.

    "},{"location":"design/autoware-architecture/vehicle/#1-requirements","title":"1. Requirements","text":"

    Goals:

    Non-goals:

    "},{"location":"design/autoware-architecture/vehicle/#2-architecture","title":"2. Architecture","text":"

    The Vehicle Interface component consists of the following components:

    Each component contains static nodes of Autoware, while each module can be dynamically loaded and unloaded (corresponding to C++ classes). The mechanism of the Vehicle Interface component is depicted by the following figures:

    "},{"location":"design/autoware-architecture/vehicle/#3-features","title":"3. Features","text":"

    The Vehicle Interface component can provide the following features in functionality and capability:

    Additional functionality and capability features may be added, depending on the vehicle hardware. Some example features are listed below:

    "},{"location":"design/autoware-architecture/vehicle/#4-interface-and-data-structure","title":"4. Interface and Data Structure","text":"

    The interface of the Vehicle Interface component for other components running in the same process space to access the functionality and capability of the Vehicle Interface component is defined as follows.

    From Control

    From Planning

    From the vehicle

    The output interface of the Vehicle Interface component:

    The data structure for the internal representation of semantics for the objects and trajectories used in the Vehicle Interface component is defined as follows:

    "},{"location":"design/autoware-architecture/vehicle/#5-concerns-assumptions-and-limitations","title":"5. Concerns, Assumptions, and Limitations","text":"

    Concerns

    Assumptions

    -

    Limitations

    "},{"location":"design/autoware-architecture/vehicle/#6-examples-of-accuracy-requirements-by-odd","title":"6. Examples of accuracy requirements by ODD","text":""},{"location":"design/autoware-concepts/","title":"Autoware concepts","text":""},{"location":"design/autoware-concepts/#autoware-concepts","title":"Autoware concepts","text":"

    Autoware is the world\u2019s first open-source software for autonomous driving systems. Autoware provides value for both technology developers and service operators. The technology developers of autonomous driving systems can create new components based on Autoware. The service operators of autonomous driving systems, on the other hand, can select appropriate technology components with Autoware. This is enabled by the microautonomy architecture that modularizes its software stack into the core and universe subsystems (modules).

    "},{"location":"design/autoware-concepts/#microautonomy-architecture","title":"Microautonomy architecture","text":"

    Autoware uses a pipeline architecture to enable the development of autonomous driving systems. The pipeline architecture used in Autoware consists of components similar to three-layer-architecture. And they run in parallel. There are 2 main modules: the Core and the Universe. The components in these modules are designed to be extensible and reusable. And we call it microautonomy architecture.

    "},{"location":"design/autoware-concepts/#the-core-module","title":"The Core module","text":"

    The Core module contains basic runtimes and technology components that satisfy the basic functionality and capability of sensing, computing, and actuation required for autonomous driving systems. AWF develops and maintains the Core module with their architects and leading members through their working groups. Anyone can contribute to the Core but the PR(Pull Request) acceptance criteria is more strict compared to the Universe.

    "},{"location":"design/autoware-concepts/#the-universe-module","title":"The Universe module","text":"

    The Universe modules are extensions to the Core module that can be provided by the technology developers to enhance the functionality and capability of sensing, computing, and actuation. AWF provides the base Universe module to extend from. A key feature of the microautonomy architecture is that the Universe modules can be contributed to by any organization and individual. That is, you can even create your Universe and make it available for the Autoware community and ecosystem. AWF is responsible for quality control of the Universe modules through their development process. As a result, there are multiple types of the Universe modules - some are verified and validated by AWF and others are not. It is up to the users of Autoware which Universe modules are selected and integrated to build their end applications.

    "},{"location":"design/autoware-concepts/#interface-design","title":"Interface design","text":"

    The interface design is the most essential piece of the microautonomy architecture, which is classified into internal and external interfaces. The component interface is designed for the components in a Universe module to communicate with those in other modules, including the Core module, within Autoware internally. The AD(Autonomous Driving) API, on the other hand, is designed for the applications of Autoware to access the technology components in the Core and Universe modules of Autoware externally. Designing solid interfaces, the microautonomy architecture is made possible with AWF's partners, and at the same time is made feasible for the partners.

    "},{"location":"design/autoware-concepts/#challenges","title":"Challenges","text":"

    A grand challenge of the microautonomy architecture is to achieve real-time capability, which guarantees all the technology components activated in the system to predictably meet timing constraints (given deadlines). In general, it is difficult, if not impossible, to tightly estimate the worst-case execution times (WCETs) of components.

    In addition, it is also difficult, if not impossible, to tightly estimate the end-to-end latency of components connected by a DAG. Autonomous driving systems based on the microautonomy architecture, therefore, must be designed to be fail-safe but not never-fail. We accept that the timing constraints may be violated (the given deadlines may be missed) as far as the overrun is taken into account. The overrun handlers are two-fold: (i) platform-defined and (ii) user-defined. The platform-defined handler is implemented as part of the platform by default, while the user-defined handler can overwrite it or add a new handler to the system. This is what we call \u201cfail-safe\u201d on a timely basis.

    "},{"location":"design/autoware-concepts/#requirements-and-roadmap","title":"Requirements and roadmap","text":"

    Goals:

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/","title":"How is Autoware Core/Universe different from Autoware.AI and Autoware.Auto?","text":""},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#how-is-autoware-coreuniverse-different-from-autowareai-and-autowareauto","title":"How is Autoware Core/Universe different from Autoware.AI and Autoware.Auto?","text":"

    Autoware is the world's first \"all-in-one\" open-source software for self-driving vehicles. Since it was first released in 2015, there have been multiple releases made with differing underlying concepts, each one aimed at improving the software.

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autowareai","title":"Autoware.AI","text":"

    Autoware.AI is the first distribution of Autoware that was released based on ROS 1. The repository contains a variety of packages covering different aspects of autonomous driving technologies - sensing, actuation, localization, mapping, perception and planning.

    While it was successful in attracting many developers and contributions, it was difficult to improve Autoware.AI's capabilities for a number of reasons:

    Furthermore, there was no clear definition of the conditions under which an Autoware-enabled autonomous vehicle could operate, nor of the use cases or situations supported (eg: the ability to overtake a stationary vehicle).

    From the lessons learned from Autoware.AI development, a different development process was taken for Autoware.Auto to develop a ROS 2 version of Autoware.

    Warning

    Autoware.AI is currently in maintenance mode and will reach end-of-life at the end of 2022.

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autowareauto","title":"Autoware.Auto","text":"

    Autoware.Auto is the second distribution of Autoware that was released based on ROS 2. As part of the transition to ROS 2, it was decided to avoid simply porting Autoware.AI from ROS 1 to ROS 2. Instead, the codebase was rewritten from scratch with proper engineering practices, including defining target use cases and ODDs (eg: Autonomous Valet Parking [AVP], Cargo Delivery, etc.), designing a proper architecture, writing design documents and test code.

    Autoware.Auto development seemed to work fine initially, but after completing the AVP and Cargo Delivery ODD projects, we started to see the following issues:

    "},{"location":"design/autoware-concepts/difference-from-ai-and-auto/#autoware-coreuniverse","title":"Autoware Core/Universe","text":"

    In order to address the issues with Autoware.Auto development, the Autoware Foundation decided to create a new architecture called Autoware Core/Universe.

    Autoware Core carries over the original policy of Autoware.Auto to be a stable and well-tested codebase. Alongside Autoware Core is a new concept called Autoware Universe, which acts as an extension of Autoware Core with the following benefits:

    This way, the primary requirement of having a stable and safe autonomous driving system can be achieved, whilst simultaneously enabling access to state-of-the-art features created by third-party contributors. For more details about the design of Autoware Core/Universe, refer to the Autoware concepts documentation page.

    "},{"location":"design/autoware-interfaces/","title":"Autoware interface design","text":""},{"location":"design/autoware-interfaces/#autoware-interface-design","title":"Autoware interface design","text":""},{"location":"design/autoware-interfaces/#abstract","title":"Abstract","text":"

    Autoware defines three categories of interfaces. The first one is Autoware AD API for operating the vehicle from outside the autonomous driving system such as the Fleet Management System (FMS) and Human Machine Interface (HMI) for operators or passengers. The second one is Autoware component interface for components to communicate with each other. The last one is the local interface used inside the component.

    "},{"location":"design/autoware-interfaces/#concept","title":"Concept","text":" "},{"location":"design/autoware-interfaces/#requirements","title":"Requirements","text":"

    Goals:

    Non-goals:

    "},{"location":"design/autoware-interfaces/#architecture","title":"Architecture","text":"

    The components of Autoware are connected via the component interface. Each component uses the interface to provide functionality and to access other components. AD API implementation is also a component. Since the functional elements required for AD API are defined as the component interface, other components do not need to consider AD API directly. Tools for evaluation and debugging, such as simulators, access both AD API and the component interface.

    The component interface has a hierarchical specification. The top-level architecture consists of some components. Each component has some options of the next-level architecture. Developers select one of them when implementing the component. The simplest next-level architecture is monolithic. This is an all-in-one and black box implementation, and is suitable for small group development, prototyping, and very complex functions. Others are arbitrary architecture consists of sub-components and have advantages for large group development. A sub-component can be combined with others that adopt the same architecture. Third parties can define and publish their own architecture and interface for open source development. It is desirable to propose them for standardization if they are sufficiently evaluated.

    "},{"location":"design/autoware-interfaces/#features","title":"Features","text":""},{"location":"design/autoware-interfaces/#communication-methods","title":"Communication methods","text":"

    As shown in the table below, interfaces are classified into four communication methods to define their behavior. Function Call is a request-response communication and is used for processing that requires immediate results. The others are publish-subscribe communication. Notification is used to process data that changes with some event, typically a callback. Streams handle continuously changing data. Reliable Stream expects all data to arrive without loss, Realtime Stream expects the latest data to arrive with low delay.

    Communication Method ROS Implementation Optional Implementation Function Call Service HTTP Notification Topic (reliable, transient_local) MQTT (QoS=2, retain) Reliable Stream Topic (reliable, volatile) MQTT (QoS=2) Realtime Stream Topic (best_effort, volatile) MQTT (QoS=0)

    These methods are provided as services or topics of ROS since Autoware is developed using ROS and mainly communicates with its packages. On the other hand, FMS and HMI are often implemented without ROS, Autoware is also expected to communicate with applications that do not use ROS. It is wasteful for each of these applications to have an adapter for Autoware, and a more suitable means of communication is required. HTTP and MQTT are suggested as additional options because these protocols are widely used and can substitute the behavior of services and topics. In that case, text formats such as JSON where field names are repeated in an array of objects, are inefficient and it is necessary to consider the serialization.

    "},{"location":"design/autoware-interfaces/#naming-convention","title":"Naming convention","text":"

    The name of the interface must be /<component name>/api/<interface name>, where <component name> is the name of the component. For an AD API component, omit this part and start with /api. The <interface name> is an arbitrary string separated by slashes. Note that this rule causes a restriction that the namespace api must not be used as a name other than AD API and the component interface.

    The following are examples of correct interface names for AD API and the component interface:

    The following are examples of incorrect interface names for AD API and the component interface:

    "},{"location":"design/autoware-interfaces/#logging","title":"Logging","text":"

    It is recommended to log the interface for analysis of vehicle behavior. If logging is needed, rosbag is available for topics, and use logger in rclcpp or rclpy for services. Typically, create a wrapper for service and client classes that logs when a service is called.

    "},{"location":"design/autoware-interfaces/#restrictions","title":"Restrictions","text":"

    For each API, consider the restrictions such as following and describe them if necessary.

    Services:

    Topics:

    "},{"location":"design/autoware-interfaces/#data-structure","title":"Data structure","text":""},{"location":"design/autoware-interfaces/#data-type-definition","title":"Data type definition","text":"

    Do not share the types in AD API unless they are obviously the same to avoid changes in one API affecting another. Also, implementation-dependent types, including the component interface, should not be used in AD API for the same reason. Use the type in AD API in implementation, or create the same type and copy the data to convert the type.

    "},{"location":"design/autoware-interfaces/#constants-and-enumeration","title":"Constants and enumeration","text":"

    Since ROS doesn't support enumeration, use constants instead. The default value of a type, such as zero or an empty string, should not be used to detect that a variable is unassigned. Alternatively, assign it a dedicated name to indicate that it is undefined. If one type has multiple enumerations, comment on the correspondence between constants and variables. Do not use enumeration values directly, as assignments are subject to change when the version is updated.

    "},{"location":"design/autoware-interfaces/#time-stamp","title":"Time stamp","text":"

    Clarify what the timestamp indicates, for example: send time, measurement time, update time, etc. Consider having multiple timestamps if necessary. Use std_msgs/msg/Header when using ROS transform. Also consider whether the header is common to all data, independent for each data, or an additional timestamp is required.

    "},{"location":"design/autoware-interfaces/#request-header","title":"Request header","text":"

    Currently, there is no required header.

    "},{"location":"design/autoware-interfaces/#response-status","title":"Response status","text":"

    The interfaces whose communication method is Function Call use a common response status to unify the error format. These interfaces should include a variable of ResponseStatus with the name status in the response. See autoware_adapi_v1_msgs/msg/ResponseStatus for details.

    "},{"location":"design/autoware-interfaces/#concerns-assumptions-and-limitations","title":"Concerns, assumptions and limitations","text":""},{"location":"design/autoware-interfaces/ad-api/","title":"Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/#autoware-ad-api","title":"Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/#overview","title":"Overview","text":"

    Autoware AD API is the interface for operating the vehicle from outside the autonomous driving system. See here for the overall interface design of Autoware.

    "},{"location":"design/autoware-interfaces/ad-api/#user-stories","title":"User stories","text":"

    The user stories are service scenarios that AD API assumes. AD API is designed based on these scenarios. Each scenario is realized by a combination of use cases described later. If there are scenarios that cannot be covered, please discuss adding a user story.

    "},{"location":"design/autoware-interfaces/ad-api/#use-cases","title":"Use cases","text":"

    Use cases are partial scenarios derived from the user story and generically designed. Service providers can combine these use cases to define user stories and check if AD API can be applied to their own scenarios.

    "},{"location":"design/autoware-interfaces/ad-api/#features","title":"Features","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/","title":"Cooperation","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#cooperation","title":"Cooperation","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#description","title":"Description","text":"

    Request to cooperate (RTC) is a feature that enables a human operator to support the decision in autonomous driving mode. Autoware usually drives the vehicle using its own decisions, but the operator may prefer to make their decisions in experiments and complex situations.

    The planning component manages each situation that requires a decision as a scene. Each scene has an ID that doesn't change until the scene is completed or canceled. The operator can override the decision of the target scene using this ID. In practice, the user interface application can hide the specification of the ID and provide an abstracted interface to the operator.

    For example, in the situation in the diagram below, the vehicle is expected to make two lane changes and turn left at the intersection. Therefore the planning component generates three scene instances, one for each required action, and each scene instance will wait for the decision to be made, in this case \"changing or keeping lane\" and \"turning left or waiting at the intersection\". Here Autoware decides not to change lanes a second time due to the obstacle, so the vehicle will stop there. However, the operator could overwrite that decision through the RTC function and force the lane change so that the vehicle could reach its goal. Using RTC, the operator can override these decisions to continue driving the vehicle to the goal.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#architecture","title":"Architecture","text":"

    Modules that support RTC have the operator decision and cooperation policy in addition to the module decision as shown below. These modules use the merged decision that is determined by these values when planning vehicle behavior. See the decisions section for details of these values. The cooperation policy is used when there is no operator decision and has a default value set by the system settings. If the module supports RTC, this information is available in velocity factors or steering factors as cooperation status.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#sequence","title":"Sequence","text":"

    This is an example sequence that overrides the scene decision to force a lane change. It is for the second scene in the diagram in the architecture section. Here let's assume the cooperation policy is set to optional, see the decisions section described later for details.

    1. A planning module creates a scene instance with unique ID when approaching a place where a lane change is needed.
    2. The scene instance generates the module decision from the current situation. In this case, the module decision is not to do a lane change due to the obstacle.
    3. The scene instance generates the merged decision. At this point, there is no operator decision yet, so it is based on the module decision.
    4. The scene instance plans the vehicle to keep the lane according to the merged decision.
    5. The scene instance sends a cooperation status.
    6. The operator receives the cooperation status.
    7. The operator sends a cooperation command to override the module decision and to do a lane change.
    8. The scene instance receives the cooperation command and updates the operator decision.
    9. The scene instance updates the module decision from the current situation.
    10. The scene instance updates the merged decision. It is based on the operator decision received.
    11. The scene instance plans the vehicle to change the lane according to the merged decision.
    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#decisions","title":"Decisions","text":"

    The merged decision is determined by the module decision, operator decision, and cooperation policy, each of which takes the value shown in the table below.

    Status Values merged decision deactivate, activate module decision deactivate, activate operator decision deactivate, activate, autonomous, none cooperation policy required, optional

    The meanings of these values are as follows. Note that the cooperation policy is common per module, so changing it will affect all scenes in the same module.

    Value Description deactivate An operator/module decision to plan vehicle behavior with priority on safety. activate An operator/module decision to plan vehicle behavior with priority on driving. autonomous An operator decision that follows the module decision. none An initial value for operator decision, indicating that there is no operator decision yet. required A policy that requires the operator decision to continue driving. optional A policy that does not require the operator decision to continue driving.

    The following flow is how the merged decision is determined.

    "},{"location":"design/autoware-interfaces/ad-api/features/cooperation/#examples","title":"Examples","text":"

    This is an example of cooperation for lane change module. The behaviors by the combination of decisions are as follows.

    Operator decision Policy Module decision Description deactivate - - The operator instructs to keep lane regardless the module decision. So the vehicle keeps the lane by the operator decision. activate - - The operator instructs to change lane regardless the module decision. So the vehicle changes the lane by the operator decision. autonomous - deactivate The operator instructs to follow the module decision. So the vehicle keeps the lane by the module decision. autonomous - activate The operator instructs to follow the module decision. So the vehicle changes the lane by the module decision. none required - The required policy is used because no operator instruction. So the vehicle keeps the lane by the cooperation policy. none optional deactivate The optional policy is used because no operator instruction. So the vehicle keeps the lane by the module decision. none optional activate The optional policy is used because no operator instruction. So the vehicle change the lane by the module decision."},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/","title":"Fail-safe","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#fail-safe","title":"Fail-safe","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#description","title":"Description","text":"

    This API manages the behavior related to the abnormality of the vehicle. It provides the state of Request to Intervene (RTI), Minimal Risk Maneuver (MRM) and Minimal Risk Condition (MRC). As shown below, Autoware has the gate to switch between the command during normal operation and the command during abnormal operation. For safety, Autoware switches the operation to MRM when an abnormality is detected. Since the required behavior differs depending on the situation, MRM is implemented in various places as a specific mode in a normal module or as an independent module. The fail-safe module selects the behavior of MRM according to the abnormality and switches the gate output to that command.

    "},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#states","title":"States","text":"

    The MRM state indicates whether MRM is operating. This state also provides success or failure. Generally, MRM will switch to another behavior if it fails.

    State Description NONE MRM is not operating. OPERATING MRM is operating because an abnormality has been detected. SUCCEEDED MRM succeeded. The vehicle is in a safe condition. FAILED MRM failed. The vehicle is still in an unsafe condition."},{"location":"design/autoware-interfaces/ad-api/features/fail-safe/#behavior","title":"Behavior","text":"

    There is a dependency between MRM behaviors. For example, it switches from a comfortable stop to an emergency stop, but not the other way around. This is service dependent. Autoware supports the following transitions by default.

    State Description NONE MRM is not operating or is operating but no special behavior is required. COMFORTABLE_STOP The vehicle will stop quickly with a comfortable deceleration. EMERGENCY_STOP The vehicle will stop immediately with as much deceleration as possible."},{"location":"design/autoware-interfaces/ad-api/features/interface/","title":"Interface","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#interface","title":"Interface","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/interface/#description","title":"Description","text":"

    This API provides the interface version of the set of AD APIs. It follows Semantic Versioning in order to provide an intuitive understanding of the changes between versions.

    "},{"location":"design/autoware-interfaces/ad-api/features/localization/","title":"Localization","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#localization","title":"Localization","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/localization/#description","title":"Description","text":"

    This API manages the initialization of localization. Autoware requires a global pose as the initial guess for localization.

    "},{"location":"design/autoware-interfaces/ad-api/features/localization/#states","title":"States","text":"State Description UNINITIALIZED Localization is not initialized. Waiting for a global pose as the initial guess. INITIALIZING Localization is initializing. INITIALIZED Localization is initialized. Initialization can be requested again if necessary."},{"location":"design/autoware-interfaces/ad-api/features/motion/","title":"Motion","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#motion","title":"Motion","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/motion/#description","title":"Description","text":"

    This API manages the current behavior of the vehicle. Applications can notify the vehicle behavior to the people around and visualize it for operator and passengers.

    "},{"location":"design/autoware-interfaces/ad-api/features/motion/#states","title":"States","text":"

    The motion state manages the stop and start of the vehicle. Once the vehicle has stopped, the state will be STOPPED. After this, when the vehicle tries to start (is still stopped), the state will be STARTING. In this state, calling the start API changes the state to MOVING and the vehicle starts. This mechanism can add processing such as announcements before the vehicle starts. Depending on the configuration, the state may transition directly from STOPPED to MOVING.

    State Description STOPPED The vehicle is stopped. STARTING The vehicle is stopped, but is trying to start. MOVING The vehicle is moving. BRAKING (T.B.D.) The vehicle is decelerating strongly."},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/","title":"Operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#operation-mode","title":"Operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#description","title":"Description","text":"

    As shown below, Autoware assumes that the vehicle interface has two modes, Autoware control and direct control. In direct control mode, the vehicle is operated using devices such as steering and pedals. If the vehicle does not support direct control mode, it is always treated as Autoware control mode. Autoware control mode has four operation modes.

    Mode Description Stop Keep the vehicle stopped. Autonomous Autonomously control the vehicle. Local Manually control the vehicle from nearby with some device such as a joystick. Remote Manually control the vehicle from a web application on the cloud.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#states","title":"States","text":""},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#autoware-control-flag","title":"Autoware control flag","text":"

    The flag is_autoware_control_enabled indicates if the vehicle is controlled by Autoware. The enable and disable APIs can be used if the control can be switched by software. These APIs will always fail if the vehicle does not support mode switching or is switched by hardware.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#operation-mode-and-change-flags","title":"Operation mode and change flags","text":"

    The state operation_mode indicates what command is used when Autoware control is enabled. The flags change_to_* can be used to check if it is possible to transition to each mode.

    "},{"location":"design/autoware-interfaces/ad-api/features/operation_mode/#transition-flag","title":"Transition flag","text":"

    Autoware may not be able to guarantee safety in some cases, such as when switching to autonomous mode during overspeed. There is the flag is_in_transition for this situation, and it will be true when changing modes. The operator who changed the mode should ensure safety while this flag is true. The flag will be false when the mode change is complete.

    "},{"location":"design/autoware-interfaces/ad-api/features/perception/","title":"Perception","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#perception","title":"Perception","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/perception/#description","title":"Description","text":"

    API for perception related topic.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/","title":"Planning factors","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#planning-factors","title":"Planning factors","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#description","title":"Description","text":"

    This API manages the planned behavior of the vehicle. Applications can notify the vehicle behavior to the people around and visualize it for operator and passengers.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#velocity-factors","title":"Velocity factors","text":"

    The velocity factors is an array of information on the behavior that the vehicle stops or slows down. Each factor has a behavior type which is described below. Some behavior types have sequence and details as additional information.

    Behavior Description surrounding-obstacle There are obstacles immediately around the vehicle. route-obstacle There are obstacles along the route ahead. intersection There are obstacles in other lanes in the path. crosswalk There are obstacles on the crosswalk. rear-check There are obstacles behind that would be in a human driver's blind spot. user-defined-attention-area There are obstacles in the predefined attention area. no-stopping-area There is not enough space beyond the no stopping area. stop-sign A stop by a stop sign. traffic-signal A stop by a traffic signal. v2x-gate-area A stop by a gate area. It has enter and leave as sequences and v2x type as details. merge A stop before merging lanes. sidewalk A stop before crossing the sidewalk. lane-change A lane change. avoidance A path change to avoid an obstacle in the current lane. emergency-operation A stop by emergency instruction from the operator.

    Each factor also provides status, poses in the base link frame, and distance from that pose. As the vehicle approaches the stop position, this factor appears with a status of APPROACHING. And when the vehicle reaches that position and stops, the status will be STOPPED. The pose indicates the stop position, or the base link if the stop position cannot be calculated.

    "},{"location":"design/autoware-interfaces/ad-api/features/planning-factors/#steering-factors","title":"Steering factors","text":"

    The steering factors is an array of information on the maneuver that requires use of turn indicators, such as turning left or right. Each factor has a behavior type which is described below and steering direction. Some behavior types have sequence and details as additional information.

    Behavior Description intersection A turning left or right at an intersection. lane-change A lane change. avoidance A path change to avoid an obstacle. It has a sequence of change and return. start-planner T.B.D. goal-planner T.B.D. emergency-operation A path change by emergency instruction from the operator.

    Each factor also provides status, poses in the base link frame, and distances from that poses. As the vehicle approaches the position to start steering, this factor appears with a status of APPROACHING. And when the vehicle reaches that position, the status will be TURNING. The poses indicate the start and end position of the section where the status is TURNING.

    In cases such as lane change and avoidance, the vehicle will start steering at any position in the range depending on the situation. For these types, the section where the status is TURNING will be updated dynamically and the poses will follow that.

    "},{"location":"design/autoware-interfaces/ad-api/features/routing/","title":"Routing","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#routing","title":"Routing","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/routing/#description","title":"Description","text":"

    This API manages destination and waypoints. Note that waypoints are not stops; they are just points to pass through. In other words, Autoware does not support a route with multiple stops; the application needs to split it up and switch between them. There are two ways to set the route. One is a generic method that uses a pose; the other is map-dependent.

    "},{"location":"design/autoware-interfaces/ad-api/features/routing/#states","title":"States","text":"State Description UNSET The route is not set. Waiting for a route request. SET The route is set. ARRIVED The vehicle has arrived at the destination. CHANGING Trying to change the route. Not implemented yet."},{"location":"design/autoware-interfaces/ad-api/features/routing/#goal-modification","title":"Goal modification","text":"

    Autoware tries to look for an alternate goal when goal is unreachable (e.g., when there is an obstacle on the given goal). When setting a route from the API, applications can choose whether they allow Autoware to adjust goal pose in such situation. When set false, Autoware may get stuck until the given goal becomes reachable.

    Option Description allow_goal_modification If true, allow goal modification."},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/","title":"Vehicle doors","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#vehicle-doors","title":"Vehicle doors","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#description","title":"Description","text":"

    This feature is available if the vehicle provides a software interface for the doors. It can be used to create user interfaces for passengers or to control sequences at bus stops.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#layout","title":"Layout","text":"

    Each door in a vehicle is assigned an array index. This assignment is vehicle dependent. The layout API returns this information. The description field is a string to display in the user interface, etc. This is an arbitrary string and is not recommended to use for processing in applications. Use the roles field to know doors for getting on and off. Below is an example of the information returned by the layout API.

    Index Description Roles 0 front right - 1 front left GET_ON 2 rear right GET_OFF 3 rear left GET_ON, GET_OFF"},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#status","title":"Status","text":"

    The status API provides an array of door status. This array order is consistent with the layout API.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-doors/#control","title":"Control","text":"

    Use the command API to control doors. Unlike the status and layout APIs, the array indices do not correspond to doors. The command has a field to specify the target door index.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/","title":"Vehicle status","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#vehicle-status","title":"Vehicle status","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#kinematics","title":"Kinematics","text":"

    This is an estimate of the vehicle kinematics. The vehicle position is necessary for applications to schedule dispatches. Also, using velocity and acceleration, applications can find vehicles that need operator assistance, such as stuck or brake suddenly.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#status","title":"Status","text":"

    This is the status provided by the vehicle. The indicators and steering are mainly used for visualization and remote control. The remaining energy can be also used for vehicle scheduling.

    "},{"location":"design/autoware-interfaces/ad-api/features/vehicle-status/#dimensions","title":"Dimensions","text":"

    The vehicle dimensions are used to know the actual distance between the vehicle and objects because the vehicle position in kinematics is the coordinates of the base link. This is necessary for visualization when supporting vehicles remotely.

    "},{"location":"design/autoware-interfaces/ad-api/list/","title":"List of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/list/#list-of-autoware-ad-api","title":"List of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/","title":"/api/fail_safe/mrm_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#apifail_safemrm_state","title":"/api/fail_safe/mrm_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#description","title":"Description","text":"

    Get the MRM state. For details, see the fail-safe.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/fail_safe/mrm_state/#message","title":"Message","text":"Name Type Description state uint16 The state of MRM operation. behavior uint16 The currently selected behavior of MRM."},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/","title":"/api/interface/version","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#apiinterfaceversion","title":"/api/interface/version","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#description","title":"Description","text":"

    Get the interface version. The version follows Semantic Versioning.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/interface/version/#response","title":"Response","text":"Name Type Description major uint16 major version minor uint16 minor version patch uint16 patch version"},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/","title":"/api/localization/initialization_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#apilocalizationinitialization_state","title":"/api/localization/initialization_state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#description","title":"Description","text":"

    Get the initialization state of localization. For details, see the localization.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialization_state/#message","title":"Message","text":"Name Type Description state uint16 A value of the localization initialization state."},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/","title":"/api/localization/initialize","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#apilocalizationinitialize","title":"/api/localization/initialize","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#description","title":"Description","text":"

    Request to initialize localization. For details, see the localization.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#request","title":"Request","text":"Name Type Description pose geometry_msgs/msg/PoseWithCovarianceStamped[<=1] A global pose as the initial guess. If omitted, the GNSS pose will be used."},{"location":"design/autoware-interfaces/ad-api/list/api/localization/initialize/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/","title":"/api/motion/accept_start","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#apimotionaccept_start","title":"/api/motion/accept_start","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#description","title":"Description","text":"

    Accept the vehicle to start. This API can be used when the motion state is STARTING.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/accept_start/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/","title":"/api/motion/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#apimotionstate","title":"/api/motion/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#description","title":"Description","text":"

    Get the motion state. For details, see the motion state.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/motion/state/#message","title":"Message","text":"Name Type Description state uint16 A value of the motion state."},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/","title":"/api/operation_mode/change_to_autonomous","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#apioperation_modechange_to_autonomous","title":"/api/operation_mode/change_to_autonomous","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#description","title":"Description","text":"

    Change the operation mode to autonomous. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_autonomous/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/","title":"/api/operation_mode/change_to_local","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#apioperation_modechange_to_local","title":"/api/operation_mode/change_to_local","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#description","title":"Description","text":"

    Change the operation mode to local. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_local/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/","title":"/api/operation_mode/change_to_remote","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#apioperation_modechange_to_remote","title":"/api/operation_mode/change_to_remote","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#description","title":"Description","text":"

    Change the operation mode to remote. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_remote/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/","title":"/api/operation_mode/change_to_stop","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#apioperation_modechange_to_stop","title":"/api/operation_mode/change_to_stop","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#description","title":"Description","text":"

    Change the operation mode to stop. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/change_to_stop/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/","title":"/api/operation_mode/disable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#apioperation_modedisable_autoware_control","title":"/api/operation_mode/disable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#description","title":"Description","text":"

    Disable vehicle control by Autoware. For details, see the operation mode. This API fails if the vehicle does not support mode change by software.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/disable_autoware_control/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/","title":"/api/operation_mode/enable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#apioperation_modeenable_autoware_control","title":"/api/operation_mode/enable_autoware_control","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#description","title":"Description","text":"

    Enable vehicle control by Autoware. For details, see the operation mode. This API fails if the vehicle does not support mode change by software.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/enable_autoware_control/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/","title":"/api/operation_mode/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#apioperation_modestate","title":"/api/operation_mode/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#description","title":"Description","text":"

    Get the operation mode state. For details, see the operation mode.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/operation_mode/state/#message","title":"Message","text":"Name Type Description mode uint8 The selected command for Autoware control. is_autoware_control_enabled bool True if vehicle control by Autoware is enabled. is_in_transition bool True if the operation mode is in transition. is_stop_mode_available bool True if the operation mode can be changed to stop. is_autonomous_mode_available bool True if the operation mode can be changed to autonomous. is_local_mode_available bool True if the operation mode can be changed to local. is_remote_mode_available bool True if the operation mode can be changed to remote."},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/","title":"/api/perception/objects","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#apiperceptionobjects","title":"/api/perception/objects","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#description","title":"Description","text":"

    Get the recognized objects array with label, shape, current position and predicted path. For details, see the perception.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/perception/objects/#message","title":"Message","text":"Name Type Description objects.id unique_identifier_msgs/msg/UUID The UUID of each object objects.existence_probability float64 The probability of the object exists objects.classification autoware_adapi_v1_msgs/msg/ObjectClassification[] The type of the object recognized and the confidence level objects.kinematics autoware_adapi_v1_msgs/msg/DynamicObjectKinematics Consists of the object pose, twist, acceleration and the predicted_paths objects.shape shape_msgs/msg/SolidPrimitive Describe the shape of the object with dimension, and polygon"},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/","title":"/api/planning/steering_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#apiplanningsteering_factors","title":"/api/planning/steering_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#description","title":"Description","text":"

    Get the steering factors, sorted in ascending order of distance. For details, see the planning factors.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/steering_factors/#message","title":"Message","text":"Name Type Description factors.pose geometry_msgs/msg/Pose[2] The base link pose related to the steering factor. factors.distance float32[2] The distance from the base link to the above pose. factors.direction uint16 The direction of the steering factor. factors.status uint16 The status of the steering factor. factors.behavior string The behavior type of the steering factor. factors.sequence string The sequence type of the steering factor. factors.detail string The additional information of the steering factor. factors.cooperation autoware_adapi_v1_msgs/msg/CooperationStatus[<=1] The cooperation status if the module supports."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/","title":"/api/planning/velocity_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#apiplanningvelocity_factors","title":"/api/planning/velocity_factors","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#description","title":"Description","text":"

    Get the velocity factors, sorted in ascending order of distance. For details, see the planning factors.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/velocity_factors/#message","title":"Message","text":"Name Type Description factors.pose geometry_msgs/msg/Pose The base link pose related to the velocity factor. factors.distance float32 The distance from the base link to the above pose. factors.status uint16 The status of the velocity factor. factors.behavior string The behavior type of the velocity factor. factors.sequence string The sequence type of the velocity factor. factors.detail string The additional information of the velocity factor. factors.cooperation autoware_adapi_v1_msgs/msg/CooperationStatus[<=1] The cooperation status if the module supports."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/","title":"/api/planning/cooperation/get_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#apiplanningcooperationget_policies","title":"/api/planning/cooperation/get_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#description","title":"Description","text":"

    Get the default decision that is used instead when the operator's decision is undecided. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/get_policies/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status policies.behavior string The type of the target behavior. policies.sequence string The type of the target sequence. policies.policy uint8 The type of the cooperation policy."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/","title":"/api/planning/cooperation/set_commands","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#apiplanningcooperationset_commands","title":"/api/planning/cooperation/set_commands","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#description","title":"Description","text":"

    Set the operator's decision for cooperation. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#request","title":"Request","text":"Name Type Description commands.uuid unique_identifier_msgs/msg/UUID The ID in the cooperation status. commands.cooperator autoware_adapi_v1_msgs/msg/CooperationDecision The operator's decision."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_commands/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/","title":"/api/planning/cooperation/set_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#apiplanningcooperationset_policies","title":"/api/planning/cooperation/set_policies","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#description","title":"Description","text":"

    Set the default decision that is used instead when the operator's decision is undecided. For details, see the cooperation.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#request","title":"Request","text":"Name Type Description policies.behavior string The type of the target behavior. policies.sequence string The type of the target sequence. policies.policy uint8 The type of the cooperation policy."},{"location":"design/autoware-interfaces/ad-api/list/api/planning/cooperation/set_policies/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/","title":"/api/routing/clear_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#apiroutingclear_route","title":"/api/routing/clear_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#description","title":"Description","text":"

    Clear the route.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/clear_route/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/","title":"/api/routing/route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#apiroutingroute","title":"/api/routing/route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#description","title":"Description","text":"

    Get the route with the waypoint segments in lanelet format. It is empty if route is not set.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/route/#message","title":"Message","text":"Name Type Description header std_msgs/msg/Header header for pose transformation data autoware_adapi_v1_msgs/msg/RouteData[<=1] The route in lanelet format"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/","title":"/api/routing/set_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#apiroutingset_route","title":"/api/routing/set_route","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#description","title":"Description","text":"

    Set the route with the waypoint segments in lanelet format. If start pose is not specified, the current pose will be used.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#request","title":"Request","text":"Name Type Description header std_msgs/msg/Header header for pose transformation goal geometry_msgs/msg/Pose goal pose segments autoware_adapi_v1_msgs/msg/RouteSegment[] waypoint segments in lanelet format"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/","title":"/api/routing/set_route_points","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#apiroutingset_route_points","title":"/api/routing/set_route_points","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#description","title":"Description","text":"

    Set the route with the waypoint poses. If start pose is not specified, the current pose will be used.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#request","title":"Request","text":"Name Type Description header std_msgs/msg/Header header for pose transformation goal geometry_msgs/msg/Pose goal pose waypoints geometry_msgs/msg/Pose[] waypoint poses"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/set_route_points/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/","title":"/api/routing/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#apiroutingstate","title":"/api/routing/state","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#description","title":"Description","text":"

    Get the route state. For details, see the routing.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/routing/state/#message","title":"Message","text":"Name Type Description state uint16 A value of the route state."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/","title":"/api/vehicle/dimensions","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#apivehicledimensions","title":"/api/vehicle/dimensions","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#description","title":"Description","text":"

    Get the vehicle dimensions. See here for the definition of each value.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/dimensions/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status dimensions autoware_adapi_v1_msgs/msg/VehicleDimensions vehicle dimensions"},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/","title":"/api/vehicle/kinematics","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#apivehiclekinematics","title":"/api/vehicle/kinematics","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#description","title":"Description","text":"

    Publish vehicle kinematics.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/kinematics/#message","title":"Message","text":"Name Type Description geographic_pose geographic_msgs/msg/GeoPointStamped The longitude and latitude of the vehicle. If the map uses local coordinates, it will not be available. pose geometry_msgs/msg/PoseWithCovarianceStamped The pose with covariance from the base link. twist geometry_msgs/msg/TwistWithCovarianceStamped Vehicle current twist with covariance. accel geometry_msgs/msg/AccelWithCovarianceStamped Vehicle current acceleration with covariance."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/","title":"/api/vehicle/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#apivehiclestatus","title":"/api/vehicle/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#description","title":"Description","text":"

    Publish vehicle state information.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/status/#message","title":"Message","text":"Name Type Description gear autoware_adapi_v1_msgs/msg/Gear Gear status. turn_indicators autoware_adapi_v1_msgs/msg/TurnIndicators Turn indicators status, only either left or right will be enabled. hazard_lights autoware_adapi_v1_msgs/msg/HazardLights Hazard lights status. steering_tire_angle float64 Vehicle current tire angle in radian. energy_percentage float32 Battery percentage or fuel percentage, it will depend on the vehicle."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/","title":"/api/vehicle/doors/command","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#apivehicledoorscommand","title":"/api/vehicle/doors/command","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#description","title":"Description","text":"

    Set the door command. This API is only available if the vehicle supports software door control.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#request","title":"Request","text":"Name Type Description doors.index uint32 The index of the target door. doors.command uint8 The command for the target door."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/command/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status"},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/","title":"/api/vehicle/doors/layout","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#apivehicledoorslayout","title":"/api/vehicle/doors/layout","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#description","title":"Description","text":"

    Get the door layout. It is an array of roles and descriptions for each door.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#request","title":"Request","text":"

    None

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/layout/#response","title":"Response","text":"Name Type Description status autoware_adapi_v1_msgs/msg/ResponseStatus response status doors.roles uint8[] The roles of the door in the service the vehicle provides. doors.description string The description of the door for display in the interface."},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/","title":"/api/vehicle/doors/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#apivehicledoorsstatus","title":"/api/vehicle/doors/status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#status","title":"Status","text":""},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#description","title":"Description","text":"

    The status of each door such as opened or closed.

    "},{"location":"design/autoware-interfaces/ad-api/list/api/vehicle/doors/status/#message","title":"Message","text":"Name Type Description doors.status uint8 current door status"},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#user-story-of-bus-service","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#overview","title":"Overview","text":"

    This user story is a bus service that goes around the designated stops.

    "},{"location":"design/autoware-interfaces/ad-api/stories/bus-service/#scenario","title":"Scenario","text":"Step Operation Use Case 1 Startup the autonomous driving system. Launch and terminate 2 Drive the vehicle from the garage to the waiting position. Change the operation mode 3 Enable autonomous control. Change the operation mode 4 Drive the vehicle to the next bus stop. Drive to the designated position 5 Get on and off the vehicle. Get on and get off 6 Return to step 4 unless it's the last bus stop. 7 Drive the vehicle to the waiting position. Drive to the designated position 8 Drive the vehicle from the waiting position to the garage. Change the operation mode 9 Shutdown the autonomous driving system. Launch and terminate"},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#user-story-of-bus-service","title":"User story of bus service","text":""},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#overview","title":"Overview","text":"

    This user story is a taxi service that picks up passengers and drives them to their destination.

    "},{"location":"design/autoware-interfaces/ad-api/stories/taxi-service/#scenario","title":"Scenario","text":"Step Operation Use Case 1 Startup the autonomous driving system. Launch and terminate 2 Drive the vehicle from the garage to the waiting position. Change the operation mode 3 Enable autonomous control. Change the operation mode 4 Drive the vehicle to the position to pick up. Drive to the designated position 5 Get on the vehicle. Get on and get off 6 Drive the vehicle to the destination. Drive to the designated position 7 Get off the vehicle. Get on and get off 8 Drive the vehicle to the waiting position. Drive to the designated position 9 Return to step 4 if there is another request. 10 Drive the vehicle from the waiting position to the garage. Change the operation mode 11 Shutdown the autonomous driving system. Launch and terminate"},{"location":"design/autoware-interfaces/ad-api/types/","title":"Types of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/types/#types-of-autoware-ad-api","title":"Types of Autoware AD API","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/","title":"autoware_adapi_v1_msgs/msg/CooperationCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#autoware_adapi_v1_msgsmsgcooperationcommand","title":"autoware_adapi_v1_msgs/msg/CooperationCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID uuid\nautoware_adapi_v1_msgs/CooperationDecision cooperator\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/","title":"autoware_adapi_v1_msgs/msg/CooperationDecision","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#autoware_adapi_v1_msgsmsgcooperationdecision","title":"autoware_adapi_v1_msgs/msg/CooperationDecision","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#definition","title":"Definition","text":"
    uint8 UNKNOWN = 0\nuint8 DEACTIVATE = 1\nuint8 ACTIVATE = 2\nuint8 AUTONOMOUS = 3\nuint8 UNDECIDED = 4\n\nuint8 decision\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationDecision/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/","title":"autoware_adapi_v1_msgs/msg/CooperationPolicy","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#autoware_adapi_v1_msgsmsgcooperationpolicy","title":"autoware_adapi_v1_msgs/msg/CooperationPolicy","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#definition","title":"Definition","text":"
    uint8 OPTIONAL = 1\nuint8 REQUIRED = 2\n\nstring behavior\nstring sequence\nuint8 policy\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationPolicy/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/","title":"autoware_adapi_v1_msgs/msg/CooperationStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#autoware_adapi_v1_msgsmsgcooperationstatus","title":"autoware_adapi_v1_msgs/msg/CooperationStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID uuid\nautoware_adapi_v1_msgs/CooperationDecision autonomous\nautoware_adapi_v1_msgs/CooperationDecision cooperator\nbool cancellable\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/CooperationStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/","title":"autoware_adapi_v1_msgs/msg/DoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#autoware_adapi_v1_msgsmsgdoorcommand","title":"autoware_adapi_v1_msgs/msg/DoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#definition","title":"Definition","text":"
    uint8 OPEN = 1\nuint8 CLOSE = 2\n\nuint32 index\nuint8 command\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/","title":"autoware_adapi_v1_msgs/msg/DoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#autoware_adapi_v1_msgsmsgdoorlayout","title":"autoware_adapi_v1_msgs/msg/DoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#definition","title":"Definition","text":"
    uint8 GET_ON = 1\nuint8 GET_OFF = 2\n\nuint8[] roles\nstring description\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorLayout/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/","title":"autoware_adapi_v1_msgs/msg/DoorStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#autoware_adapi_v1_msgsmsgdoorstatus","title":"autoware_adapi_v1_msgs/msg/DoorStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#definition","title":"Definition","text":"
    uint8 UNKNOWN = 0\nuint8 NOT_AVAILABLE = 1\nuint8 OPENED = 2\nuint8 CLOSED = 3\nuint8 OPENING = 4\nuint8 CLOSING = 5\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/","title":"autoware_adapi_v1_msgs/msg/DoorStatusArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#autoware_adapi_v1_msgsmsgdoorstatusarray","title":"autoware_adapi_v1_msgs/msg/DoorStatusArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\nautoware_adapi_v1_msgs/DoorStatus[] doors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DoorStatusArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/","title":"autoware_adapi_v1_msgs/msg/DynamicObject","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#autoware_adapi_v1_msgsmsgdynamicobject","title":"autoware_adapi_v1_msgs/msg/DynamicObject","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#definition","title":"Definition","text":"
    unique_identifier_msgs/UUID id\nfloat64 existence_probability\nautoware_adapi_v1_msgs/ObjectClassification[] classification\nautoware_adapi_v1_msgs/DynamicObjectKinematics kinematics\nshape_msgs/SolidPrimitive shape\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObject/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#autoware_adapi_v1_msgsmsgdynamicobjectarray","title":"autoware_adapi_v1_msgs/msg/DynamicObjectArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/DynamicObject[] objects\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#autoware_adapi_v1_msgsmsgdynamicobjectkinematics","title":"autoware_adapi_v1_msgs/msg/DynamicObjectKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#definition","title":"Definition","text":"
    geometry_msgs/Pose pose\ngeometry_msgs/Twist twist\ngeometry_msgs/Accel accel\n\nautoware_adapi_v1_msgs/DynamicObjectPath[] predicted_paths\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectKinematics/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/","title":"autoware_adapi_v1_msgs/msg/DynamicObjectPath","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#autoware_adapi_v1_msgsmsgdynamicobjectpath","title":"autoware_adapi_v1_msgs/msg/DynamicObjectPath","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#definition","title":"Definition","text":"
    geometry_msgs/Pose[] path\nbuiltin_interfaces/Duration time_step\nfloat64 confidence\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/DynamicObjectPath/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/","title":"autoware_adapi_v1_msgs/msg/Gear","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#autoware_adapi_v1_msgsmsggear","title":"autoware_adapi_v1_msgs/msg/Gear","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 NEUTRAL = 1\nuint8 DRIVE = 2\nuint8 REVERSE = 3\nuint8 PARK = 4\nuint8 LOW = 5\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Gear/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/","title":"autoware_adapi_v1_msgs/msg/HazardLights","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#autoware_adapi_v1_msgsmsghazardlights","title":"autoware_adapi_v1_msgs/msg/HazardLights","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 DISABLE = 1\nuint8 ENABLE = 2\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/HazardLights/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/","title":"autoware_adapi_v1_msgs/msg/LocalizationInitializationState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#autoware_adapi_v1_msgsmsglocalizationinitializationstate","title":"autoware_adapi_v1_msgs/msg/LocalizationInitializationState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 UNINITIALIZED = 1\nuint16 INITIALIZING = 2\nuint16 INITIALIZED = 3\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/LocalizationInitializationState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/","title":"autoware_adapi_v1_msgs/msg/MotionState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#autoware_adapi_v1_msgsmsgmotionstate","title":"autoware_adapi_v1_msgs/msg/MotionState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 STOPPED = 1\nuint16 STARTING = 2\nuint16 MOVING = 3\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MotionState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/","title":"autoware_adapi_v1_msgs/msg/MrmState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#autoware_adapi_v1_msgsmsgmrmstate","title":"autoware_adapi_v1_msgs/msg/MrmState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\n\n# For common use\nuint16 UNKNOWN = 0\n\n# For state\nuint16 NORMAL = 1\nuint16 MRM_OPERATING = 2\nuint16 MRM_SUCCEEDED = 3\nuint16 MRM_FAILED = 4\n\n# For behavior\nuint16 NONE = 1\nuint16 EMERGENCY_STOP = 2\nuint16 COMFORTABLE_STOP = 3\n\nuint16 state\nuint16 behavior\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/MrmState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/","title":"autoware_adapi_v1_msgs/msg/ObjectClassification","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#autoware_adapi_v1_msgsmsgobjectclassification","title":"autoware_adapi_v1_msgs/msg/ObjectClassification","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#definition","title":"Definition","text":"
    uint8 UNKNOWN=0\nuint8 CAR=1\nuint8 TRUCK=2\nuint8 BUS=3\nuint8 TRAILER = 4\nuint8 MOTORCYCLE = 5\nuint8 BICYCLE = 6\nuint8 PEDESTRIAN = 7\n\nuint8 label\nfloat64 probability\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ObjectClassification/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/","title":"autoware_adapi_v1_msgs/msg/OperationModeState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#autoware_adapi_v1_msgsmsgoperationmodestate","title":"autoware_adapi_v1_msgs/msg/OperationModeState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#definition","title":"Definition","text":"
    # constants for mode\nuint8 UNKNOWN = 0\nuint8 STOP = 1\nuint8 AUTONOMOUS = 2\nuint8 LOCAL = 3\nuint8 REMOTE = 4\n\n# variables\nbuiltin_interfaces/Time stamp\nuint8 mode\nbool is_autoware_control_enabled\nbool is_in_transition\nbool is_stop_mode_available\nbool is_autonomous_mode_available\nbool is_local_mode_available\nbool is_remote_mode_available\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/OperationModeState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/","title":"autoware_adapi_v1_msgs/msg/ResponseStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#autoware_adapi_v1_msgsmsgresponsestatus","title":"autoware_adapi_v1_msgs/msg/ResponseStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#definition","title":"Definition","text":"
    # error code\nuint16 UNKNOWN = 50000\nuint16 SERVICE_UNREADY = 50001\nuint16 SERVICE_TIMEOUT = 50002\nuint16 TRANSFORM_ERROR = 50003\nuint16 PARAMETER_ERROR = 50004\n\n# warning code\nuint16 DEPRECATED = 60000\nuint16 NO_EFFECT = 60001\n\n# variables\nbool   success\nuint16 code\nstring message\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/ResponseStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/","title":"autoware_adapi_v1_msgs/msg/Route","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#autoware_adapi_v1_msgsmsgroute","title":"autoware_adapi_v1_msgs/msg/Route","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteData[<=1] data\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/Route/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/","title":"autoware_adapi_v1_msgs/msg/RouteData","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#autoware_adapi_v1_msgsmsgroutedata","title":"autoware_adapi_v1_msgs/msg/RouteData","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#definition","title":"Definition","text":"
    geometry_msgs/Pose start\ngeometry_msgs/Pose goal\nautoware_adapi_v1_msgs/RouteSegment[] segments\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteData/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/","title":"autoware_adapi_v1_msgs/msg/RouteOption","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#autoware_adapi_v1_msgsmsgrouteoption","title":"autoware_adapi_v1_msgs/msg/RouteOption","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#definition","title":"Definition","text":"
    bool allow_goal_modification\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteOption/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/","title":"autoware_adapi_v1_msgs/msg/RoutePrimitive","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#autoware_adapi_v1_msgsmsgrouteprimitive","title":"autoware_adapi_v1_msgs/msg/RoutePrimitive","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#definition","title":"Definition","text":"
    int64 id\nstring type  # The same id may be used for each type.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RoutePrimitive/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/","title":"autoware_adapi_v1_msgs/msg/RouteSegment","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#autoware_adapi_v1_msgsmsgroutesegment","title":"autoware_adapi_v1_msgs/msg/RouteSegment","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/RoutePrimitive   preferred\nautoware_adapi_v1_msgs/RoutePrimitive[] alternatives  # Does not include the preferred primitive.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteSegment/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/","title":"autoware_adapi_v1_msgs/msg/RouteState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#autoware_adapi_v1_msgsmsgroutestate","title":"autoware_adapi_v1_msgs/msg/RouteState","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#definition","title":"Definition","text":"
    uint16 UNKNOWN = 0\nuint16 UNSET = 1\nuint16 SET = 2\nuint16 ARRIVED = 3\nuint16 CHANGING = 4\n\nbuiltin_interfaces/Time stamp\nuint16 state\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/RouteState/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/","title":"autoware_adapi_v1_msgs/msg/SteeringFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#autoware_adapi_v1_msgsmsgsteeringfactor","title":"autoware_adapi_v1_msgs/msg/SteeringFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#definition","title":"Definition","text":"
    # constants for common use\nuint16 UNKNOWN = 0\n\n# constants for direction\nuint16 LEFT = 1\nuint16 RIGHT = 2\nuint16 STRAIGHT = 3\n\n# constants for status\nuint16 APPROACHING = 1\nuint16 TURNING = 3\n\n# variables\ngeometry_msgs/Pose[2] pose\nfloat32[2] distance\nuint16 direction\nuint16 status\nstring behavior\nstring sequence\nstring detail\nautoware_adapi_v1_msgs/CooperationStatus[<=1] cooperation\n\n\n\n# deprecated constants for type\nuint16 INTERSECTION = 1\nuint16 LANE_CHANGE = 2\nuint16 AVOIDANCE_PATH_CHANGE = 3\nuint16 AVOIDANCE_PATH_RETURN = 4\nuint16 STATION = 5\nuint16 PULL_OUT = 6 # Deprecated. Use START_PLANNER.\nuint16 START_PLANNER = 6\nuint16 PULL_OVER = 7  # Deprecated. Use GOAL_PLANNER.\nuint16 GOAL_PLANNER = 7\nuint16 EMERGENCY_OPERATION = 8\n\n# deprecated constants for status\nuint16 TRYING = 2\n\n# deprecated variables\nuint16 type\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactor/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/","title":"autoware_adapi_v1_msgs/msg/SteeringFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#autoware_adapi_v1_msgsmsgsteeringfactorarray","title":"autoware_adapi_v1_msgs/msg/SteeringFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/SteeringFactor[] factors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/SteeringFactorArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/","title":"autoware_adapi_v1_msgs/msg/TurnIndicators","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#autoware_adapi_v1_msgsmsgturnindicators","title":"autoware_adapi_v1_msgs/msg/TurnIndicators","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#definition","title":"Definition","text":"
    # constants\nuint8 UNKNOWN = 0\nuint8 DISABLE = 1\nuint8 LEFT = 2\nuint8 RIGHT = 3\n\nuint8 status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/TurnIndicators/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/","title":"autoware_adapi_v1_msgs/msg/VehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#autoware_adapi_v1_msgsmsgvehicledimensions","title":"autoware_adapi_v1_msgs/msg/VehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#definition","title":"Definition","text":"
    float32 wheel_radius\nfloat32 wheel_width\nfloat32 wheel_base\nfloat32 wheel_tread\nfloat32 front_overhang\nfloat32 rear_overhang\nfloat32 left_overhang\nfloat32 right_overhang\nfloat32 height\ngeometry_msgs/Polygon footprint\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleDimensions/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/","title":"autoware_adapi_v1_msgs/msg/VehicleKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#autoware_adapi_v1_msgsmsgvehiclekinematics","title":"autoware_adapi_v1_msgs/msg/VehicleKinematics","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#definition","title":"Definition","text":"
    # Geographic point, using the WGS 84 reference ellipsoid.\n# This data will be invalid If Autoware does not provide projection information between geographic coordinates and local coordinates.\ngeographic_msgs/GeoPointStamped geographic_pose\n\n# Local coordinate from the autoware\ngeometry_msgs/PoseWithCovarianceStamped pose\ngeometry_msgs/TwistWithCovarianceStamped twist\ngeometry_msgs/AccelWithCovarianceStamped accel\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleKinematics/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/","title":"autoware_adapi_v1_msgs/msg/VehicleStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#autoware_adapi_v1_msgsmsgvehiclestatus","title":"autoware_adapi_v1_msgs/msg/VehicleStatus","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#definition","title":"Definition","text":"
    builtin_interfaces/Time stamp\nautoware_adapi_v1_msgs/Gear gear\nautoware_adapi_v1_msgs/TurnIndicators turn_indicators\nautoware_adapi_v1_msgs/HazardLights hazard_lights\nfloat64 steering_tire_angle\nfloat32 energy_percentage  # Battery percentage or fuel percentage, it will depends on the vehicle.\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VehicleStatus/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/","title":"autoware_adapi_v1_msgs/msg/VelocityFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#autoware_adapi_v1_msgsmsgvelocityfactor","title":"autoware_adapi_v1_msgs/msg/VelocityFactor","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#definition","title":"Definition","text":"
    # constants for common use\nuint16 UNKNOWN = 0\n\n# constants for status\nuint16 APPROACHING = 1\nuint16 STOPPED = 2\n\n# variables\ngeometry_msgs/Pose pose\nfloat32 distance\nuint16 status\nstring behavior\nstring sequence\nstring detail\nautoware_adapi_v1_msgs/CooperationStatus[<=1] cooperation\n\n\n\n# deprecated constants for type\nuint16 SURROUNDING_OBSTACLE = 1\nuint16 ROUTE_OBSTACLE = 2\nuint16 INTERSECTION = 3\nuint16 CROSSWALK = 4\nuint16 REAR_CHECK = 5\nuint16 USER_DEFINED_DETECTION_AREA = 6\nuint16 NO_STOPPING_AREA = 7\nuint16 STOP_SIGN = 8\nuint16 TRAFFIC_SIGNAL = 9\nuint16 V2I_GATE_CONTROL_ENTER = 10\nuint16 V2I_GATE_CONTROL_LEAVE = 11\nuint16 MERGE = 12\nuint16 SIDEWALK = 13\nuint16 LANE_CHANGE = 14\nuint16 AVOIDANCE = 15\nuint16 EMERGENCY_STOP_OPERATION = 16\nuint16 NO_DRIVABLE_LANE = 17\n\n# deprecated variables\nuint16 type\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactor/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/","title":"autoware_adapi_v1_msgs/msg/VelocityFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#autoware_adapi_v1_msgsmsgvelocityfactorarray","title":"autoware_adapi_v1_msgs/msg/VelocityFactorArray","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/VelocityFactor[] factors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/msg/VelocityFactorArray/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/","title":"autoware_adapi_v1_msgs/srv/AcceptStart","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#autoware_adapi_v1_msgssrvacceptstart","title":"autoware_adapi_v1_msgs/srv/AcceptStart","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#definition","title":"Definition","text":"
    ---\nuint16 ERROR_NOT_STARTING = 1\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/AcceptStart/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/","title":"autoware_adapi_v1_msgs/srv/ChangeOperationMode","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#autoware_adapi_v1_msgssrvchangeoperationmode","title":"autoware_adapi_v1_msgs/srv/ChangeOperationMode","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#definition","title":"Definition","text":"
    ---\nuint16 ERROR_NOT_AVAILABLE = 1\nuint16 ERROR_IN_TRANSITION = 2\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ChangeOperationMode/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/","title":"autoware_adapi_v1_msgs/srv/ClearRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#autoware_adapi_v1_msgssrvclearroute","title":"autoware_adapi_v1_msgs/srv/ClearRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/ClearRoute/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/","title":"autoware_adapi_v1_msgs/srv/GetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#autoware_adapi_v1_msgssrvgetcooperationpolicies","title":"autoware_adapi_v1_msgs/srv/GetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/CooperationPolicy[] policies\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetCooperationPolicies/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/","title":"autoware_adapi_v1_msgs/srv/GetDoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#autoware_adapi_v1_msgssrvgetdoorlayout","title":"autoware_adapi_v1_msgs/srv/GetDoorLayout","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/DoorLayout[] doors\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetDoorLayout/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/","title":"autoware_adapi_v1_msgs/srv/GetVehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#autoware_adapi_v1_msgssrvgetvehicledimensions","title":"autoware_adapi_v1_msgs/srv/GetVehicleDimensions","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#definition","title":"Definition","text":"
    ---\nautoware_adapi_v1_msgs/ResponseStatus status\nautoware_adapi_v1_msgs/VehicleDimensions dimensions\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/GetVehicleDimensions/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/","title":"autoware_adapi_v1_msgs/srv/InitializeLocalization","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#autoware_adapi_v1_msgssrvinitializelocalization","title":"autoware_adapi_v1_msgs/srv/InitializeLocalization","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#definition","title":"Definition","text":"
    geometry_msgs/PoseWithCovarianceStamped[<=1] pose\n---\nuint16 ERROR_UNSAFE = 1\nuint16 ERROR_GNSS_SUPPORT = 2\nuint16 ERROR_GNSS = 3\nuint16 ERROR_ESTIMATION = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/InitializeLocalization/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/","title":"autoware_adapi_v1_msgs/srv/SetCooperationCommands","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#autoware_adapi_v1_msgssrvsetcooperationcommands","title":"autoware_adapi_v1_msgs/srv/SetCooperationCommands","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/CooperationCommand[] commands\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationCommands/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/","title":"autoware_adapi_v1_msgs/srv/SetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#autoware_adapi_v1_msgssrvsetcooperationpolicies","title":"autoware_adapi_v1_msgs/srv/SetCooperationPolicies","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/CooperationPolicy[] policies\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetCooperationPolicies/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/","title":"autoware_adapi_v1_msgs/srv/SetDoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#autoware_adapi_v1_msgssrvsetdoorcommand","title":"autoware_adapi_v1_msgs/srv/SetDoorCommand","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#definition","title":"Definition","text":"
    autoware_adapi_v1_msgs/DoorCommand[] doors\n---\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetDoorCommand/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/","title":"autoware_adapi_v1_msgs/srv/SetRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#autoware_adapi_v1_msgssrvsetroute","title":"autoware_adapi_v1_msgs/srv/SetRoute","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteOption option\ngeometry_msgs/Pose goal\nautoware_adapi_v1_msgs/RouteSegment[] segments\n---\nuint16 ERROR_ROUTE_EXISTS = 1 # Deprecated. Use ERROR_INVALID_STATE.\nuint16 ERROR_INVALID_STATE = 1\nuint16 ERROR_PLANNER_UNREADY = 2\nuint16 ERROR_PLANNER_FAILED = 3\nuint16 ERROR_REROUTE_FAILED = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoute/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/","title":"autoware_adapi_v1_msgs/srv/SetRoutePoints","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#autoware_adapi_v1_msgssrvsetroutepoints","title":"autoware_adapi_v1_msgs/srv/SetRoutePoints","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#definition","title":"Definition","text":"
    std_msgs/Header header\nautoware_adapi_v1_msgs/RouteOption option\ngeometry_msgs/Pose goal\ngeometry_msgs/Pose[] waypoints\n---\nuint16 ERROR_ROUTE_EXISTS = 1 # Deprecated. Use ERROR_INVALID_STATE.\nuint16 ERROR_INVALID_STATE = 1\nuint16 ERROR_PLANNER_UNREADY = 2\nuint16 ERROR_PLANNER_FAILED = 3\nuint16 ERROR_REROUTE_FAILED = 4\nautoware_adapi_v1_msgs/ResponseStatus status\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_v1_msgs/srv/SetRoutePoints/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/","title":"autoware_adapi_version_msgs/srv/InterfaceVersion","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#autoware_adapi_version_msgssrvinterfaceversion","title":"autoware_adapi_version_msgs/srv/InterfaceVersion","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#definition","title":"Definition","text":"
    ---\nuint16 major\nuint16 minor\nuint16 patch\n
    "},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#this-type-uses","title":"This type uses","text":""},{"location":"design/autoware-interfaces/ad-api/types/autoware_adapi_version_msgs/srv/InterfaceVersion/#this-type-is-used-by","title":"This type is used by","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/","title":"Change the operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#change-the-operation-mode","title":"Change the operation mode","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/change-operation-mode/#sequence","title":"Sequence","text":" "},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/","title":"Drive to the designated position","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#drive-to-the-designated-position","title":"Drive to the designated position","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/drive-designated-position/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/","title":"Get on and get off","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#get-on-and-get-off","title":"Get on and get off","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/get-on-off/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/","title":"Initialize the 
pose","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#initialize-the-pose","title":"Initialize the pose","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/initialize-pose/#sequence","title":"Sequence","text":" "},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/","title":"Launch and terminate","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#launch-and-terminate","title":"Launch and terminate","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#related-api","title":"Related API","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/launch-terminate/#sequence","title":"Sequence","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/","title":"Vehicle monitoring","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#vehicle-monitoring","title":"Vehicle monitoring","text":"

    AD API provides current vehicle status for remote monitoring, visualization for passengers, etc. Use the API below depending on the data you want to monitor.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#vehicle-status","title":"Vehicle status","text":"

    The vehicle status provides basic information such as kinematics, indicators, and dimensions. This allows a remote operator to know the position and velocity of the vehicle. For applications such as FMS, it can help find vehicles that need assistance, such as vehicles that are stuck or brake suddenly. It is also possible to determine the actual distance to an object from the vehicle dimensions.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#planning-factors","title":"Planning factors","text":"

    The planning factors provides the planning status of the vehicle. HMI can use this to warn of sudden movements of the vehicle, and to share the stop reason with passengers for comfortable driving.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-monitoring/#detected-objects","title":"Detected objects","text":"

    The perception provides the objects detected by Autoware. HMI can use this to visualize objects around the vehicle.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/","title":"Vehicle operation","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#vehicle-operation","title":"Vehicle operation","text":""},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#request-to-intervene","title":"Request to intervene","text":"

    Request to intervene (RTI) is a feature that requires the operator to switch to manual driving mode. It is also called Take Over Request (TOR). Interfaces for RTI are currently being discussed. For now, assume that manual driving is requested if the MRM state is not NORMAL. See fail-safe for details.

    "},{"location":"design/autoware-interfaces/ad-api/use-cases/vehicle-operation/#request-to-cooperate","title":"Request to cooperate","text":"

    Request to cooperate (RTC) is a feature whereby the operator supports the decision in autonomous driving mode. Autoware usually drives the vehicle using its own decisions, but the operator may prefer to make their own decisions in complex situations. Since RTC only overrides the decision and does not need to change operation mode, the vehicle can continue autonomous driving, unlike RTI. See cooperation for details.

    "},{"location":"design/autoware-interfaces/components/","title":"Component interfaces","text":""},{"location":"design/autoware-interfaces/components/#component-interfaces","title":"Component interfaces","text":"

    Warning

    Under Construction

    See here for an overview.

    "},{"location":"design/autoware-interfaces/components/control/","title":"Control","text":""},{"location":"design/autoware-interfaces/components/control/#control","title":"Control","text":""},{"location":"design/autoware-interfaces/components/control/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/control/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current position and orientation of ego. Published by the Localization module.

    "},{"location":"design/autoware-interfaces/components/control/#trajectory","title":"Trajectory","text":"

    Trajectory to be followed by the controller. See Outputs of Planning.

    "},{"location":"design/autoware-interfaces/components/control/#steering-status","title":"Steering Status","text":"

    Current steering of the ego vehicle. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/control/#actuation-status","title":"Actuation Status","text":"

    Actuation status of the ego vehicle for acceleration, steering, and brake.

    TODO This represents the reported physical efforts exerted by the vehicle actuators. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/control/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/control/#vehicle-control-command","title":"Vehicle Control Command","text":"

    A motion signal to drive the vehicle, achieved by the low-level controller in the vehicle layer. Used by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/localization/","title":"Localization","text":""},{"location":"design/autoware-interfaces/components/localization/#localization","title":"Localization","text":""},{"location":"design/autoware-interfaces/components/localization/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/localization/#pointcloud-map","title":"Pointcloud Map","text":"

    Environment map created with point cloud, published by the map server.

    A 3d point cloud map is used for LiDAR-based localization in Autoware.

    "},{"location":"design/autoware-interfaces/components/localization/#manual-initial-pose","title":"Manual Initial Pose","text":"

    Start pose of ego, published by the user interface.

    "},{"location":"design/autoware-interfaces/components/localization/#3d-lidar-scanning","title":"3D-LiDAR Scanning","text":"

    LiDAR scanning for NDT matching, published by the LiDAR sensor.

    The raw 3D-LiDAR data needs to be processed by the point cloud pre-processing modules before being used for localization.

    "},{"location":"design/autoware-interfaces/components/localization/#automatic-initial-pose","title":"Automatic Initial pose","text":"

    Start pose of ego, calculated from INS(Inertial navigation sensor) sensing data.

    When the initial pose is not set manually, the message can be used for automatic pose initialization.

    Current Geographic coordinate of the ego, published by the GNSS sensor.

    Current orientation of the ego, published by the GNSS-INS.

    "},{"location":"design/autoware-interfaces/components/localization/#imu-data","title":"IMU Data","text":"

    Current orientation, angular velocity and linear acceleration of ego, calculated from IMU sensing data.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-velocity-status","title":"Vehicle Velocity Status","text":"

    Current velocity of the ego vehicle, published by the vehicle interface.

    Before the velocity is input to the localization interface, the vehicle_velocity_converter module converts the message type from autoware_auto_vehicle_msgs/msg/VelocityReport to geometry_msgs/msg/TwistWithCovarianceStamped.

    "},{"location":"design/autoware-interfaces/components/localization/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/localization/#vehicle-pose","title":"Vehicle pose","text":"

    Current pose of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-velocity","title":"Vehicle velocity","text":"

    Current velocity of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-acceleration","title":"Vehicle acceleration","text":"

    Current acceleration of ego, calculated from localization interface.

    "},{"location":"design/autoware-interfaces/components/localization/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current pose, velocity and acceleration of ego, calculated from localization interface.

    Note: Kinematic state contains pose, velocity and acceleration. In the future, pose, velocity and acceleration will not be used as output for localization.

    The message will be subscribed by the planning and control module.

    "},{"location":"design/autoware-interfaces/components/localization/#localization-accuracy","title":"Localization Accuracy","text":"

    Diagnostics information that indicates if the localization module works properly.

    TBD.

    "},{"location":"design/autoware-interfaces/components/map/","title":"Map","text":""},{"location":"design/autoware-interfaces/components/map/#map","title":"Map","text":""},{"location":"design/autoware-interfaces/components/map/#overview","title":"Overview","text":"

    Autoware relies on high-definition point cloud maps and vector maps of the driving environment to perform various tasks. Before launching Autoware, you need to load the pre-created map files.

    "},{"location":"design/autoware-interfaces/components/map/#inputs","title":"Inputs","text":"

    Refer to Creating maps on how to create maps.

    "},{"location":"design/autoware-interfaces/components/map/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/map/#point-cloud-map","title":"Point cloud map","text":"

    It loads point cloud files and publishes the maps to the other Autoware nodes in various configurations. Currently, it supports the following types:

    "},{"location":"design/autoware-interfaces/components/map/#lanelet2-map","title":"Lanelet2 map","text":"

    It loads a Lanelet2 file and publishes the map data as an autoware_auto_mapping_msgs/msg/HADMapBin message. The lat/lon coordinates are projected onto the MGRS coordinates.

    "},{"location":"design/autoware-interfaces/components/map/#lanelet2-map-visualization","title":"Lanelet2 map visualization","text":"

    Visualize autoware_auto_mapping_msgs/HADMapBin messages in Rviz.

    "},{"location":"design/autoware-interfaces/components/perception-interface/","title":"Perception","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#perception","title":"Perception","text":"
    graph TD\n    cmp_sen(\"Sensing\"):::cls_sen\n    cmp_loc(\"Localization\"):::cls_loc\n    cmp_per(\"Perception\"):::cls_per\n    cmp_plan(\"Planning\"):::cls_plan\n\n    msg_img(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_sen\n\n    msg_ldr(\"<font size=2><b>Lidar Point Cloud</b></font size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_sen\n\n    msg_lanenet(\"<font size=2><b>Lanelet2 Map</b></font size>\n    <font size=1>autoware_auto_mapping_msgs/HADMapBin</font size>\"):::cls_loc\n\n    msg_vks(\"<font size=2><b>Vehicle Kinematic State</b></font size>\n    <font size=1>nav_msgs/Odometry</font size>\"):::cls_loc\n\n    msg_obj(\"<font size=2><b>3D Object Predictions </b></font size>\n    <font size=1>autoware_auto_perception_msgs/PredictedObjects</font size>\"):::cls_per\n\n    msg_tl(\"<font size=2><b>Traffic Light Response </b></font size>\n    <font size=1>autoware_perception_msgs/TrafficSignalArray</font size>\"):::cls_per\n\n    msg_tq(\"<font size=2><b>Traffic Light Query </b></font size>\n    <font size=1>TBD</font size>\"):::cls_plan\n\n\n    cmp_sen --> msg_img --> cmp_per\n    cmp_sen --> msg_ldr --> cmp_per\n    cmp_per --> msg_obj --> cmp_plan\n    cmp_per --> msg_tl --> cmp_plan\n    cmp_plan --> msg_tq -->cmp_per\n\n    cmp_loc --> msg_vks --> cmp_per\n    cmp_loc --> msg_lanenet --> cmp_per\n\nclassDef cmp_sen fill:#F8CECC,stroke:#999,stroke-width:1px;\nclassDef cls_loc fill:#D5E8D4,stroke:#999,stroke-width:1px;\nclassDef cls_per fill:#FFF2CC,stroke:#999,stroke-width:1px;\nclassDef cls_plan fill:#5AB8FF,stroke:#999,stroke-width:1px;
    "},{"location":"design/autoware-interfaces/components/perception-interface/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#pointcloud","title":"PointCloud","text":"

    PointCloud data published by Lidar.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#image","title":"Image","text":"

    Image frame captured by camera.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#vehicle-kinematic-state","title":"Vehicle kinematic state","text":"

    Current position of ego, used in traffic signal recognition. See output of Localization.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#lanelet2-map","title":"Lanelet2 Map","text":"

    Map of the environment. See outputs of Map.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/perception-interface/#3d-object-predictions","title":"3D Object Predictions","text":"

    3D Objects detected, tracked and predicted by sensor fusing.

    "},{"location":"design/autoware-interfaces/components/perception-interface/#traffic-signals","title":"Traffic Signals","text":"

    Traffic signals recognized by the object detection model.

    "},{"location":"design/autoware-interfaces/components/planning/","title":"Planning","text":""},{"location":"design/autoware-interfaces/components/planning/#planning","title":"Planning","text":"

    This page provides specific specifications about the Interface of the Planning Component. Please refer to the planning architecture design document for high-level concepts and data flow.

    TODO: The detailed definitions (meanings of elements included in each topic) are not described yet, need to be updated.

    "},{"location":"design/autoware-interfaces/components/planning/#input","title":"Input","text":""},{"location":"design/autoware-interfaces/components/planning/#from-map-component","title":"From Map Component","text":"Name Topic Type Description Vector Map /map/vector_map autoware_auto_mapping_msgs/msg/HADMapBin Map of the environment where the planning takes place."},{"location":"design/autoware-interfaces/components/planning/#from-localization-component","title":"From Localization Component","text":"Name Topic Type Description Vehicle Kinematic State /localization/kinematic_state nav_msgs/msg/Odometry Current position, orientation and velocity of ego. Vehicle Acceleration /localization/acceleration geometry_msgs/msg/AccelWithCovarianceStamped Current acceleration of ego.

    TODO: acceleration information should be merged into the kinematic state.

    "},{"location":"design/autoware-interfaces/components/planning/#from-perception-component","title":"From Perception Component","text":"Name Topic Type Description Objects /perception/object_recognition/objects autoware_auto_perception_msgs/msg/PredictedObjects Set of perceived objects around ego that need to be avoided or followed when planning a trajectory. This contains semantic information such as an object class (e.g. vehicle, pedestrian, etc) or a shape of the objects. Obstacles /perception/obstacle_segmentation/pointcloud sensor_msgs/msg/PointCloud2 Set of perceived obstacles around ego that need to be avoided or followed when planning a trajectory. This only contains primitive information of the obstacle. No shape nor velocity information. Occupancy Grid Map /perception/occupancy_grid_map/map nav_msgs/msg/OccupancyGrid Contains the presence of obstacles and blind spot information (represented as UNKNOWN). Traffic Signal /perception/traffic_light_recognition/traffic_signals autoware_auto_perception_msgs/msg/TrafficSignalArray Contains the traffic signal information such as a color (green, yellow, red) and an arrow (right, left, straight).

    TODO: The type of the Obstacles information should not depend on the specific sensor message type (now PointCloud). It needs to be fixed.

    "},{"location":"design/autoware-interfaces/components/planning/#from-api","title":"From API","text":"Name Topic Type Description Max Velocity /planning/scenario_planning/max_velocity_default autoware_adapi_v1_msgs/srv/SetRoutePoints Indicate the maximum value of the vehicle speed plan Operation Mode /system/operation_mode/state autoware_adapi_v1_msgs/msg/OperationModeState Indicates the current operation mode (automatic/manual, etc.). Route Set /planning/mission_planning/set_route autoware_adapi_v1_msgs/srv/SetRoute Indicates to set the route when the vehicle is stopped. Route Points Set /planning/mission_planning/set_route_points autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to set the route with points when the vehicle is stopped. Route Change /planning/mission_planning/change_route autoware_adapi_v1_msgs/srv/SetRoute Indicates to change the route when the vehicle is moving. Route Points Change /planning/mission_planning/change_route_points autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to change the route with points when the vehicle is moving. Route Clear /planning/mission_planning/clear_route autoware_adapi_v1_msgs/srv/ClearRoute Indicates to clear the route information. MRM Route Set Points /planning/mission_planning/mission_planner/srv/set_mrm_route autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to set the emergency route. MRM Route Clear /planning/mission_planning/mission_planner/srv/clear_mrm_route autoware_adapi_v1_msgs/srv/SetRoutePoints Indicates to clear the emergency route."},{"location":"design/autoware-interfaces/components/planning/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/planning/#to-control","title":"To Control","text":"Name Topic Type Description Trajectory /planning/trajectory autoware_auto_planning_msgs/msg/Trajectory A sequence of space and velocity and acceleration points to be followed by the controller. 
Turn Indicator /planning/turn_indicators_cmd autoware_auto_vehicle_msgs/msg/TurnIndicatorsCommand Turn indicator signal to be followed by the vehicle. Hazard Light /planning/hazard_lights_cmd autoware_auto_vehicle_msgs/msg/HazardLightsCommand Hazard light signal to be followed by the vehicle."},{"location":"design/autoware-interfaces/components/planning/#to-system","title":"To System","text":"Name Topic Type Description Diagnostics /planning/hazard_lights_cmd diagnostic_msgs/msg/DiagnosticArray Diagnostic status of the Planning component reported to the System component."},{"location":"design/autoware-interfaces/components/planning/#to-api","title":"To API","text":"Name Topic Type Description Path Candidate /planning/path_candidate/* autoware_auto_planning_msgs/msg/Path The path Autoware is about to take. Users can interrupt the operation based on the path candidate information. Steering Factor /planning/steering_factor/* autoware_adapi_v1_msgs/msg/SteeringFactorArray Information about the steering maneuvers performed by Autoware (e.g., steering to the right for a right turn, etc.) Velocity Factor /planning/velocity_factors/* autoware_adapi_v1_msgs/msg/VelocityFactorArray Information about the velocity maneuvers performed by Autoware (e.g., stop for an obstacle, etc.)"},{"location":"design/autoware-interfaces/components/planning/#planning-internal-interface","title":"Planning internal interface","text":"

    This section explains the communication between the different planning modules shown in the Planning Architecture Design.

    "},{"location":"design/autoware-interfaces/components/planning/#from-mission-planning-to-scenario-planning","title":"From Mission Planning to Scenario Planning","text":"Name Topic Type Description Route /planning/mission_planning/route autoware_planning_msgs/msg/LaneletRoute A sequence of lane IDs on a Lanelet map, from the starting point to the destination."},{"location":"design/autoware-interfaces/components/planning/#from-behavior-planning-to-motion-planning","title":"From Behavior Planning to Motion Planning","text":"Name Topic Type Description Path /planning/scenario_planning/lane_driving/behavior_planning/path autoware_auto_planning_msgs/msg/Path A sequence of approximate vehicle positions for driving, along with information on the maximum speed and the drivable areas. Modules receiving this message are expected to make changes to the path within the constraints of the drivable areas and the maximum speed, generating the desired final trajectory."},{"location":"design/autoware-interfaces/components/planning/#from-scenario-planning-to-validation","title":"From Scenario Planning to Validation","text":"Name Topic Type Description Trajectory /planning/scenario_planning/trajectory autoware_auto_planning_msgs/msg/Trajectory A sequence of precise vehicle positions, speeds, and accelerations required for driving. It is expected that the vehicle will follow this trajectory."},{"location":"design/autoware-interfaces/components/sensing/","title":"Sensing","text":""},{"location":"design/autoware-interfaces/components/sensing/#sensing","title":"Sensing","text":"
    graph TD\n    cmp_drv(\"Drivers\"):::cls_drv\n    cmp_loc(\"Localization\"):::cls_loc\n    cmp_per(\"Perception\"):::cls_per\n    cmp_sen(\"Preprocessors\"):::cls_sen\n    msg_ult(\"<font size=2><b>Ultrasonics</b></font size>\n    <font size=1>sensor_msgs/Range</font size>\"):::cls_drv\n    msg_img(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_drv\n    msg_ldr(\"<font size=2><b>Lidar Point Cloud</b></font size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_drv\n    msg_rdr_t(\"<font size=2><b>Radar Tracks</b></font size>\n    <font size=1>radar_msgs/RadarTracks</font size>\"):::cls_drv\n    msg_rdr_s(\"<font size=2><b>Radar Scan</b></font size>\n    <font size=1>radar_msgs/RadarScan</font size>\"):::cls_drv\n    msg_gnss(\"<font size=2><b>GNSS-INS Position</b></font size>\n    <font size=1>sensor_msgs/NavSatFix</font size>\"):::cls_drv\n    msg_gnssori(\"<font size=2><b>GNSS-INS Orientation</b></font size>\n    <font size=1>autoware_sensing_msgs/GnssInsOrientationStamped</font size>\"):::cls_drv\n    msg_gnssvel(\"<font size=2><b>GNSS Velocity</b></font size>\n    <font size=1>geometry_msgs/TwistWithCovarianceStamped</font size>\"):::cls_drv\n    msg_gnssacc(\"<font size=2><b>GNSS Acceleration</b></font size>\n    <font size=1>geometry_msgs/AccelWithCovarianceStamped</font size>\"):::cls_drv\n    msg_ult_sen(\"<font size=2><b>Ultrasonics</b></font size>\n    <font size=1>sensor_msgs/Range</font size>\"):::cls_sen\n    msg_img_sen(\"<font size=2><b>Camera Image</b></font size>\n    <font size=1>sensor_msgs/Image</font size>\"):::cls_sen\n    msg_pc_combined_rdr(\"<font size=2><b>Combined Radar Tracks</b></font size>\n    <font size=1>radar_msgs/RadarTracks</font size>\"):::cls_sen\n    msg_pc_rdr(\"<font size=2><b>Radar Pointcloud</b></font size>\n    <font size=1>radar_msgs/RadarScan</font size>\"):::cls_sen\n    msg_pc_combined_ldr(\"<font size=2><b>Combined Lidar Point Cloud</b></font 
size>\n    <font size=1>sensor_msgs/PointCloud2</font size>\"):::cls_sen\n    msg_pose_gnss(\"<font size=2><b>GNSS-INS Pose</b></font size>\n    <font size=1>geometry_msgs/PoseWithCovarianceStamped</font size>\"):::cls_sen\n    msg_gnssori_sen(\"<font size=2><b>GNSS-INS Orientation</b></font size>\n    <font size=1>sensor_msgs/Imu</font size>\"):::cls_sen\n    msg_gnssvel_sen(\"<font size=2><b>GNSS Velocity</b></font size>\n    <font size=1>geometry_msgs/TwistWithCovarianceStamped</font size>\"):::cls_sen\n    msg_gnssacc_sen(\"<font size=2><b>GNSS-INS Acceleration</b></font size>\n    <font size=1>geometry_msgs/AccelWithCovarianceStamped</font size>\"):::cls_sen\n\n    cmp_drv --> msg_ult --> cmp_sen\n    cmp_drv --> msg_img --> cmp_sen\n    cmp_drv --> msg_rdr_t --> cmp_sen\n    cmp_drv --> msg_rdr_s --> cmp_sen\n    cmp_drv --> msg_ldr --> cmp_sen\n    cmp_drv --> msg_gnss --> cmp_sen\n    cmp_drv --> msg_gnssori --> cmp_sen\n    cmp_drv --> msg_gnssvel --> cmp_sen\n    cmp_drv --> msg_gnssacc --> cmp_sen\n\n    cmp_sen --> msg_ult_sen\n    cmp_sen --> msg_img_sen\n    cmp_sen --> msg_gnssori_sen\n    cmp_sen --> msg_gnssvel_sen\n    cmp_sen --> msg_pc_combined_rdr\n    cmp_sen --> msg_pc_rdr\n    cmp_sen --> msg_pc_combined_ldr\n    cmp_sen --> msg_pose_gnss\n    cmp_sen --> msg_gnssacc_sen\n    msg_ult_sen --> cmp_per\n    msg_img_sen --> cmp_per\n    msg_pc_combined_rdr --> cmp_per\n    msg_pc_rdr --> cmp_per\n    msg_pc_combined_ldr --> cmp_per\n    msg_pc_combined_ldr --> cmp_loc\n    msg_pose_gnss --> cmp_loc\n    msg_gnssori_sen --> cmp_loc\n    msg_gnssvel_sen --> cmp_loc\n    msg_gnssacc_sen --> cmp_loc\nclassDef cls_drv fill:#F8CECC,stroke:#999,stroke-width:1px;\nclassDef cls_loc fill:#D5E8D4,stroke:#999,stroke-width:1px;\nclassDef cls_per fill:#FFF2CC,stroke:#999,stroke-width:1px;\nclassDef cls_sen fill:#FFE6CC,stroke:#999,stroke-width:1px;
    "},{"location":"design/autoware-interfaces/components/sensing/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/sensing/#ultrasonics","title":"Ultrasonics","text":"

    Distance data from ultrasonic radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#camera-image","title":"Camera Image","text":"

    Image data from camera driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-tracks","title":"Radar Tracks","text":"

    Tracks from radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-scan","title":"Radar Scan","text":"

    Scan from radar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#lidar-point-cloud","title":"Lidar Point Cloud","text":"

    Pointcloud from lidar driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-position","title":"GNSS-INS Position","text":"

    Initial pose from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-orientation","title":"GNSS-INS Orientation","text":"

    Initial orientation from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-velocity","title":"GNSS Velocity","text":"

    Initial velocity from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-acceleration","title":"GNSS Acceleration","text":"

    Initial acceleration from GNSS driver.

    "},{"location":"design/autoware-interfaces/components/sensing/#output","title":"Output","text":""},{"location":"design/autoware-interfaces/components/sensing/#ultrasonics_1","title":"Ultrasonics","text":"

    Distance data from ultrasonic radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#camera-image_1","title":"Camera Image","text":"

    Image data from camera. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#combined-radar-tracks","title":"Combined Radar Tracks","text":"

    Radar tracks from radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#radar-point-cloud","title":"Radar Point Cloud","text":"

    Pointcloud from radar. Used by the Perception.

    "},{"location":"design/autoware-interfaces/components/sensing/#combined-lidar-point-cloud","title":"Combined Lidar Point Cloud","text":"

    Lidar pointcloud after preprocessing. Used by the Perception and Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-pose","title":"GNSS-INS pose","text":"

    Initial pose of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-ins-orientation_1","title":"GNSS-INS Orientation","text":"

    Orientation info from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-velocity_1","title":"GNSS velocity","text":"

    Velocity of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/sensing/#gnss-acceleration_1","title":"GNSS Acceleration","text":"

    Acceleration of the ego vehicle from GNSS. Used by the Localization.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-dimensions","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-axes-and-base_link","title":"Vehicle axes and base_link","text":"

    The base_link frame is used very frequently throughout the Autoware stack, and is a projection of the rear-axle center onto the ground surface.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle-dimensions_1","title":"Vehicle dimensions","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheelbase","title":"wheelbase","text":"

    The distance between front and rear axles.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#track_width","title":"track_width","text":"

    The distance between left and right wheels.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#overhangs","title":"Overhangs","text":"

    Overhangs are part of the minimum safety box calculation.

    When measuring overhangs, side mirrors, protruding sensors and wheels should be taken into consideration.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#left_overhang","title":"left_overhang","text":"

    The distance between the axis centers of the left wheels and the left-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#right_overhang","title":"right_overhang","text":"

    The distance between the axis centers of the right wheels and the right-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#front_overhang","title":"front_overhang","text":"

    The distance between the front axle and the foremost point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#rear_overhang","title":"rear_overhang","text":"

    The distance between the rear axle and the rear-most point of the vehicle.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle_length","title":"vehicle_length","text":"

    Total length of the vehicle. Calculated by front_overhang + wheelbase + rear_overhang

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#vehicle_width","title":"vehicle_width","text":"

    Total width of the vehicle. Calculated by left_overhang + track_width + right_overhang

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel-parameters","title":"Wheel parameters","text":""},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel_width","title":"wheel_width","text":"

    The lateral width of a wheel tire, primarily used for dead reckoning.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel_radius","title":"wheel_radius","text":"

    The radius of the wheel, primarily used for dead reckoning.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#polygon_footprint","title":"polygon_footprint","text":"

    The polygon defines the minimum collision area for the vehicle.

    The points should be ordered clockwise, with the origin on the base_link.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#wheel-orientations","title":"Wheel orientations","text":"

    If the vehicle is going forward, a positive wheel angle will result in the vehicle turning left.

    Autoware assumes the rear wheels don't turn on the z-axis.

    "},{"location":"design/autoware-interfaces/components/vehicle-dimensions/#notice","title":"Notice","text":"

    The vehicle used in the illustrations was created by xvlblo22 and is from https://www.turbosquid.com/3d-models/modular-sedan-3d-model-1590886.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/","title":"Vehicle Interface","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-interface","title":"Vehicle Interface","text":"

    The Vehicle Interface receives the Vehicle Signal Commands and Vehicle Control Commands and publishes the vehicle status. It also communicates with vehicle by the vehicle-specific protocol.

    The Gate switches multiple Vehicle Control Commands. These signals include autonomous driving command, joystick, remote control, and emergency operation, etc. The Adapter converts generalized control command (target steering, steering rate, velocity, acceleration, jerk) into vehicle-specific control values (steering-torque, wheel-torque, voltage, pressure, accel pedal position, etc).

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#inputs","title":"Inputs","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#error-status","title":"Error status","text":"

    (See Inputs of Planning.)

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-control-command","title":"Vehicle Control Command","text":"

    (See Output of Control.)

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-signals-commands","title":"Vehicle Signals Commands","text":"

    Commands for various elements of the vehicle unrelated to motion. Published by the Planning module.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#outputs","title":"Outputs","text":""},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-signal-reports","title":"Vehicle Signal Reports","text":"

    Reports for various elements of the vehicle unrelated to motion. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-odometry","title":"Vehicle Odometry","text":"

    Odometry of the vehicle. Used by the Localization module to update the pose of the vehicle in the map.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#steering-status","title":"Steering Status","text":"

    Steering of the ego vehicle. Published by the Vehicle Interface.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#actuation-status","title":"Actuation Status","text":"

    Actuation status of the ego vehicle for acceleration, steering, and brake. This represents the reported physical efforts exerted by the vehicle actuators. Published by the Vehicle Interface.

    The message definition is under discussion.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#actuation-command","title":"Actuation Command","text":"

    Actuation command sent to the ego vehicle. This represents the requested physical efforts to be exerted by the vehicle actuators. Published by the Vehicle Interface as generated by the adapter.

    The message definition is under discussion.

    "},{"location":"design/autoware-interfaces/components/vehicle-interface/#vehicle-communication","title":"Vehicle Communication","text":"

    Vehicle-specific message protocol such as CAN (Controller Area Network).

    "},{"location":"design/configuration-management/","title":"Configuration management","text":""},{"location":"design/configuration-management/#configuration-management","title":"Configuration management","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/development-process/","title":"Development process","text":""},{"location":"design/configuration-management/development-process/#development-process","title":"Development process","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/release-process/","title":"Release process","text":""},{"location":"design/configuration-management/release-process/#release-process","title":"Release process","text":"

    Warning

    Under Construction

    "},{"location":"design/configuration-management/repository-structure/","title":"Repository structure","text":""},{"location":"design/configuration-management/repository-structure/#repository-structure","title":"Repository structure","text":"

    Warning

    Under Construction

    "},{"location":"how-to-guides/","title":"How-to guides","text":""},{"location":"how-to-guides/#how-to-guides","title":"How-to guides","text":""},{"location":"how-to-guides/#integrating-autoware","title":"Integrating Autoware","text":""},{"location":"how-to-guides/#training-machine-learning-models","title":"Training Machine Learning Models","text":""},{"location":"how-to-guides/#others","title":"Others","text":"

    TODO: Write the following contents.

    "},{"location":"how-to-guides/integrating-autoware/overview/","title":"Overview","text":""},{"location":"how-to-guides/integrating-autoware/overview/#overview","title":"Overview","text":""},{"location":"how-to-guides/integrating-autoware/overview/#requirement-prepare-your-real-vehicle-hardware","title":"Requirement: prepare your real vehicle hardware","text":"

    Prerequisites for the vehicle:

    "},{"location":"how-to-guides/integrating-autoware/overview/#1-creating-your-autoware-meta-repository","title":"1. Creating your Autoware meta-repository","text":"

    Create your Autoware meta-repository. One easy way is to fork autowarefoundation/autoware and clone it. For how to fork a repository, refer to GitHub Docs.

    git clone https://github.com/YOUR_NAME/autoware.git\n

    If you set up multiple types of vehicles, adding a suffix like \"autoware.vehicle_A\" or \"autoware.vehicle_B\" is recommended.

    "},{"location":"how-to-guides/integrating-autoware/overview/#2-creating-the-your-vehicle-and-sensor-description","title":"2. Creating the your vehicle and sensor description","text":"

    Next, you need to create description packages that define the vehicle and sensor configuration of your vehicle.

    Create the following two packages:

    Once created, you need to update the autoware.repos file of your cloned Autoware repository to refer to these two description packages.

    -  # sensor_kit\n-  sensor_kit/sample_sensor_kit_launch:\n-    type: git\n-    url: https://github.com/autowarefoundation/sample_sensor_kit_launch.git\n-    version: main\n-  # vehicle\n-  vehicle/sample_vehicle_launch:\n-    type: git\n-    url: https://github.com/autowarefoundation/sample_vehicle_launch.git\n-    version: main\n+  # sensor_kit\n+  sensor_kit/YOUR_SENSOR_KIT_launch:\n+    type: git\n+    url: https://github.com/YOUR_NAME/YOUR_SENSOR_KIT_launch.git\n+    version: main\n+  # vehicle\n+  vehicle/YOUR_VEHICLE_launch:\n+    type: git\n+    url: https://github.com/YOUR_NAME/YOUR_VEHICLE_launch.git\n+    version: main\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#adapt-your_vehicle_launch-for-autoware-launching-system","title":"Adapt YOUR_VEHICLE_launch for autoware launching system","text":""},{"location":"how-to-guides/integrating-autoware/overview/#at-your_vehicle_description","title":"At YOUR_VEHICLE_description","text":"

    Define URDF and parameters in the vehicle description package (refer to the sample vehicle description package for an example).

    "},{"location":"how-to-guides/integrating-autoware/overview/#at-your_vehicle_launch","title":"At YOUR_VEHICLE_launch","text":"

    Create a launch file (refer to the sample vehicle launch package for example). If you have multiple vehicles with the same hardware setup, you can specify vehicle_id to distinguish them.

    "},{"location":"how-to-guides/integrating-autoware/overview/#adapt-your_sensor_kit_description-for-autoware-launching-system","title":"Adapt YOUR_SENSOR_KIT_description for autoware launching system","text":""},{"location":"how-to-guides/integrating-autoware/overview/#at-your_sensor_kit_description","title":"At YOUR_SENSOR_KIT_description","text":"

    Define URDF and extrinsic parameters for all the sensors here (refer to the sample sensor kit description package for example). Note that you need to calibrate extrinsic parameters for all the sensors beforehand.

    "},{"location":"how-to-guides/integrating-autoware/overview/#at-your_sensor_kit_launch","title":"At YOUR_SENSOR_KIT_launch","text":"

    Create launch/sensing.launch.xml that launches the interfaces of all the sensors on the vehicle. (refer to the sample sensor kit launch package for example).

    Note

    At this point, you are now able to run Autoware's Planning Simulator to do a basic test of your vehicle and sensing packages. To do so, you need to build and install Autoware using your cloned repository. Follow the steps for either Docker or source installation (starting from the dependency installation step) and then run the following command:

    ros2 launch autoware_launch planning_simulator.launch.xml vehicle_model:=YOUR_VEHICLE sensor_kit:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#3-create-a-vehicle_interface-package","title":"3. Create a vehicle_interface package","text":"

    You need to create an interface package for your vehicle. The package is expected to provide the following two functions.

    1. Receive command messages from vehicle_cmd_gate and drive the vehicle accordingly
    2. Send vehicle status information to Autoware

    You can find detailed information about the requirements of the vehicle_interface package in the Vehicle Interface design documentation. You can also refer to TIER IV's pacmod_interface repository as an example of a vehicle interface package.

    "},{"location":"how-to-guides/integrating-autoware/overview/#4-create-maps","title":"4. Create maps","text":"

    You need both a pointcloud map and a vector map in order to use Autoware. For more information on map design, please click here.

    "},{"location":"how-to-guides/integrating-autoware/overview/#create-a-pointcloud-map","title":"Create a pointcloud map","text":"

    Use third-party tools such as a LiDAR-based SLAM (Simultaneous Localization And Mapping) package to create a pointcloud map in the .pcd format. For more information, please click here.

    "},{"location":"how-to-guides/integrating-autoware/overview/#create-vector-map","title":"Create vector map","text":"

    Use third-party tools such as TIER IV's Vector Map Builder to create a Lanelet2 format .osm file.

    "},{"location":"how-to-guides/integrating-autoware/overview/#5-launch-autoware","title":"5. Launch Autoware","text":"

    This section briefly explains how to run your vehicle with Autoware.

    "},{"location":"how-to-guides/integrating-autoware/overview/#install-autoware","title":"Install Autoware","text":"

    Follow the installation steps of Autoware.

    "},{"location":"how-to-guides/integrating-autoware/overview/#launch-autoware","title":"Launch Autoware","text":"

    Launch Autoware with the following command:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_kit:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n
    "},{"location":"how-to-guides/integrating-autoware/overview/#set-initial-pose","title":"Set initial pose","text":"

    If GNSS is available, Autoware automatically initializes the vehicle's pose.

    If not, you need to set the initial pose using the RViz GUI.

    1. Click the 2D Pose estimate button in the toolbar, or hit the P key
    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the initial pose.
    "},{"location":"how-to-guides/integrating-autoware/overview/#set-goal-pose","title":"Set goal pose","text":"

    Set a goal pose for the ego vehicle.

    1. Click the 2D Nav Goal button in the toolbar, or hit the G key
    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the goal pose. If successful, you will see the calculated planning path on RViz.
    "},{"location":"how-to-guides/integrating-autoware/overview/#engage","title":"Engage","text":"

    In your terminal, execute the following command.

    source ~/autoware.YOURS/install/setup.bash\nros2 topic pub /autoware.YOURS/engage autoware_auto_vehicle_msgs/msg/Engage \"engage: true\" -1\n

    You can also engage via RViz with \"AutowareStatePanel\". The panel can be found in Panels > Add New Panel > tier4_state_rviz_plugin > AutowareStatePanel.

    Now the vehicle should drive along the calculated path!

    "},{"location":"how-to-guides/integrating-autoware/overview/#6-tune-parameters-for-your-vehicle-environment","title":"6. Tune parameters for your vehicle & environment","text":"

    You may need to tune your parameters depending on the domain in which you will operate your vehicle.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/","title":"Creating maps","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-maps","title":"Creating maps","text":"

    Autoware requires a pointcloud map and a vector map for the vehicle's operating environment. (Check the map design documentation page for the detailed specification).

    This page explains how users can create maps that can be used for Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-a-point-cloud-map","title":"Creating a point cloud map","text":"

    Traditionally, a Mobile Mapping System (MMS) is used in order to create highly accurate large-scale point cloud maps. However, since an MMS requires high-end sensors for precise positioning, its operational cost can be very expensive and may not be suitable for a relatively small driving environment. Alternatively, a Simultaneous Localization And Mapping (SLAM) algorithm can be used to create a point cloud map from recorded LiDAR scans. Some of the useful open-source SLAM implementations are listed on this page.

    If you prefer proprietary software that is easy to use, you can try a fully automatic mapping tool from MAP IV, Inc., MapIV Engine. They currently provide a trial license for Autoware users free of charge.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#creating-a-vector-map","title":"Creating a vector map","text":"

    The easiest way to create an Autoware-compatible vector map is to use Vector Map Builder, a free web-based tool provided by TIER IV, Inc.. Vector Map Builder allows you to create lanes and add additional regulatory elements such as stop signs or traffic lights using a point cloud map as a reference.

    For open-source software options, MapToolbox is a plugin for Unity specifically designed to create Lanelet2 maps for Autoware. Although JOSM is another open-source tool that can be used to create Lanelet2 maps, be aware that a number of modifications must be done manually to make the map compatible with Autoware. This process can be tedious and time-consuming, so the use of JOSM is not recommended.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/#autoware-compatible-map-providers","title":"Autoware-compatible map providers","text":"

    If it is not possible to create HD maps yourself, you can use a mapping service from the following Autoware-compatible map providers instead:

    The table below shows each company's mapping technology and the types of HD maps they support.

    Company Mapping technology Available maps MAP IV, Inc. SLAM Point cloud and vector maps AISAN TECHNOLOGY CO., LTD. MMS Point cloud and vector maps TomTom MMS Vector map*

    Note

    Maps provided by TomTom use their proprietary AutoStream format, not Lanelet2. The open-source AutoStreamForAutoware tool can be used to convert an AutoStream map to a Lanelet2 map. However, the converter is still in its early stages and has some known limitations.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/","title":"Converting UTM maps to MGRS map format","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#converting-utm-maps-to-mgrs-map-format","title":"Converting UTM maps to MGRS map format","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#overview","title":"Overview","text":"

    If you want to use MGRS (Military Grid Reference System) format in Autoware, you need to convert UTM (Universal Transverse Mercator) map to MGRS format. In order to do that, we will use UTM to MGRS pointcloud converter ROS 2 package provided by Leo Drive.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#installation","title":"Installation","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#dependencies","title":"Dependencies","text":"

    To install dependencies:

    sudo apt install ros-humble-pcl-conversions \\\ngeographiclib-tools\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#building","title":"Building","text":"
        cd <PATH-TO-YOUR-ROS-2-WORKSPACE>/src\n    git clone https://github.com/leo-drive/pc_utm_to_mgrs_converter.git\n    cd ..\n    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/converting-utm-to-mgrs-map/#usage","title":"Usage","text":"

    After installing the converter tool, we need to define the northing, easting and ellipsoid height of the local UTM map origin in pc_utm_to_mgrs_converter.param.yaml. For example, you can use the latitude, longitude and altitude values in the navsatfix message from your GNSS/INS sensor.

    Sample ROS 2 topic echo from navsatfix message
    header:\nstamp:\nsec: 1694612439\nnanosec: 400000000\nframe_id: GNSS_INS/gnss_ins_link\nstatus:\nstatus: 0\nservice: 1\nlatitude: 41.0216110801253\nlongitude: 28.887096461148346\naltitude: 74.28264078891529\nposition_covariance:\n- 0.0014575386885553598\n- 0.0\n- 0.0\n- 0.0\n- 0.004014162812381983\n- 0.0\n- 0.0\n- 0.0\n- 0.0039727711118757725\nposition_covariance_type: 2\n

    After that, you need to convert the latitude and longitude values to northing and easting values. You can use any converter on the internet for converting latitude and longitude values to UTM. (e.g., UTMconverter)

    Now, we are ready to update pc_utm_to_mgrs_converter.param.yaml, example for our navsatfix message:

    /**:\n  ros__parameters:\n      # Northing of local origin\n-     Northing: 4520550.0\n+     Northing: 4542871.33\n\n     # Easting of local origin\n-     Easting: 698891.0\n+     Easting: 658659.84\n\n     # Elipsoid Height of local origin\n-     ElipsoidHeight: 47.62\n+     ElipsoidHeight: 74.28\n

    Lastly, we will update the input and output pointcloud map paths in pc_utm_to_mgrs_converter.launch.xml:

    ...\n- <arg name=\"input_file_path\" default=\"/home/melike/projects/autoware_data/gebze_pospac_map/pointcloud_map.pcd\"/>\n+ <arg name=\"input_file_path\" default=\"<PATH-TO-YOUR-INPUT-PCD-MAP>\"/>\n- <arg name=\"output_file_path\" default=\"/home/melike/projects/autoware_data/gebze_pospac_map/pointcloud_map_mgrs_orto.pcd\"/>\n+ <arg name=\"output_file_path\" default=\"<PATH-TO-YOUR-OUTPUT-PCD-MAP>\"/>\n...\n

    After the setting of the package, we will launch pc_utm_to_mgrs_converter:

    ros2 launch pc_utm_to_mgrs_converter pc_utm_to_mgrs_converter.launch.xml\n

    The conversion process will start, and you should see a Saved <YOUR-MAP-POINTS-SIZE> data points saved to <YOUR-OUTPUT-MAP-PATH> message on your terminal. The MGRS-format pointcloud map is then saved in your output map directory.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/","title":"Creating a vector map","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#creating-a-vector-map","title":"Creating a vector map","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#overview","title":"Overview","text":"

    In this section, we will explain how to create Lanelet2 maps with TIER IV's vector map builder tool.

    If you want to look at another method, MapToolbox is a plugin for Unity specifically designed to create Lanelet2 maps for Autoware. We do not recommend JOSM, since it requires manual modifications for Autoware usage, which can be tedious and time-consuming.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/#vector-map-builder","title":"Vector Map Builder","text":"

    You need a TIER IV account to use the Vector Map Builder tool. If you do not have one yet, please create a TIER IV account. For more information about this tool, please check the official guide.

    You can follow these pages for creating a Lanelet2 map and its regulatory elements.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/","title":"Crosswalk attribute","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#crosswalk-attribute","title":"Crosswalk attribute","text":"

    Behavior velocity planner's crosswalk module plans velocity to stop or decelerate for pedestrians approaching or walking on a crosswalk. In order to operate that, we will add crosswalk attribute to our lanelet2 map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#creating-a-crosswalk-attribute","title":"Creating a crosswalk attribute","text":"

    In order to create a crosswalk on your map, please follow these steps:

    1. Click Abstraction button on top panel.
    2. Select Crosswalk from the panel.
    3. Click and draw crosswalk on your pointcloud map.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/crosswalk/#testing-created-crosswalk-with-planning-simulator","title":"Testing created crosswalk with planning simulator","text":"

    After completing the map, we need to save it. To do that, please click File --> Export Lanelet2Maps, then download.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    + <YOUR-MAP-DIRECTORY>/\n+  \u251c\u2500 pointcloud_map.pcd\n+  \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    3. We need to add pedestrians to crosswalk, so activate interactive pedestrians from Tool Properties panel on rviz.
    4. After that, please press Shift, then right-click to insert pedestrians.
    5. You can control an inserted pedestrian by dragging it with the right mouse button.

    Crosswalk markers on rviz:

    Crosswalk test on the created map.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/","title":"Creating a Lanelet","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-lanelet","title":"Creating a Lanelet","text":"

    At this page, we will explain how to create a simple lanelet on your pointcloud map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-lanelet2","title":"Creating a Lanelet2","text":"

    Firstly, we need to import our pointcloud map to vector map builder tool:

    1. Please click File.
    2. Then, click Import PCD.
    3. Click Browse and select your .pcd file.

    The point cloud will be displayed in the Vector Map Builder tool after the upload is complete:

    Uploaded pointcloud map file on vector map builder

    Now, we are ready to create lanelet2 map on our pointcloud map:

    1. Please click Create.
    2. Then, click Create Lanelet2Maps.
    3. Please fill your map name
    4. Please fill your MGRS zone. (At tutorial_vehicle, MGRS grid zone: 35T - MGRS 100,000-meter square: PF)
    5. Click Create.
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#creating-a-simple-lanelet","title":"Creating a simple lanelet","text":"

    In order to create a simple lanelet on your map, please follow these steps:

    1. Click Lanelet2Maps on the bar.
    2. Enable Lanelet mode via selecting Lanelet.
    3. Then, you can click the pointcloud map to create lanelet.
    4. If your lanelet is finished, you can disable Lanelet.
    5. If you want to change your lanelet width, click lanelet --> Change Lanelet Width, then you can enter the lanelet width.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#join-two-lanelets","title":"Join two lanelets","text":"

    In order to join two lanelets, please follow these steps:

    1. Please create two distinct lanelet.
    2. Select a Lanelet, then press Shift and select other lanelet.
    3. Now, you can see Join Lanelets button, just press it.
    4. These lanelets will be joined.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#join-multiple-lanelets","title":"Join Multiple lanelets","text":"

    In order to add (join) two or more lanelets to another lanelet, please follow these steps:

    1. Create multiple lanelets.
    2. You can join the first two lanelets like the steps before.
    3. Please check end points ids of first lanelet.
    4. Then you need to change these ids with third lanelet's start point. (Please change with selecting linestring of lanelet)
    5. You will see that the next two lanes of the first lanelet appear.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#change-speed-limit-of-lanelet","title":"Change Speed Limit Of Lanelet","text":"

    In order to change the speed limit of lanelet, please follow these steps:

    1. Select the lanelet where the speed limit will be changed
    2. Set speed limit on the right panel.
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/lanelet2/#test-lanelets-with-planning-simulator","title":"Test lanelets with planning simulator","text":"

    After completing the lanelets, we need to save them. To do that, please click File --> Export Lanelet2Maps, then download.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    <YOUR-MAP-DIRECTORY>/\n \u251c\u2500 pointcloud_map.pcd\n \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.

    Testing our created vector map with planning simulator"},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/","title":"Stop Line","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#stop-line","title":"Stop Line","text":"

    Behavior velocity planner's stop line module plans velocity to stop right before stop lines and restart driving after stopped. In order to operate that, we will add stop line attribute to our lanelet2 map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#creating-a-stop-line-regulatory-element","title":"Creating a stop line regulatory element","text":"

    In order to create a stop line on your pointcloud map, please follow these steps:

    1. Please select lanelet which stop line to be added.
    2. Click Abstraction button on top panel.
    3. Select Stop Line from the panel.
    4. Click on the desired area for inserting stop line.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/stop-line/#testing-created-the-stop-line-element-with-planning-simulator","title":"Testing created the stop line element with planning simulator","text":"

    After completing the map, we need to save it. To do that, please click File --> Export Lanelet2Maps, then download.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    + <YOUR-MAP-DIRECTORY>/\n+  \u251c\u2500 pointcloud_map.pcd\n+  \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    3. You can see the stop line marker on the rviz screen.

    Stop line markers on rviz:

    Stop line test on the created map.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/traffic-light/","title":"Traffic light","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/traffic-light/#traffic-light","title":"Traffic light","text":"

    Behavior velocity planner's traffic light module plans velocity according to the traffic light status. In order to use this functionality, we will add a traffic light regulatory element to our lanelet2 map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/traffic-light/#creating-a-traffic-light-regulatory-element","title":"Creating a traffic light regulatory element","text":"

    In order to create a traffic light on your pointcloud map, please follow these steps:

    1. Please select lanelet which traffic light to be added.
    2. Click Abstraction button on top panel.
    3. Select Traffic Light from the panel.
    4. Click on the desired area for inserting traffic light.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/creating-vector-map/traffic-light/#testing-created-the-traffic-light-element-with-planning-simulator","title":"Testing created the traffic light element with planning simulator","text":"

    After completing the map, we need to save it. To do that, please click File --> Export Lanelet2Maps, then download.

    After the download is finished, we need to put lanelet2 map and pointcloud map on the same location. The directory structure should be like this:

    + <YOUR-MAP-DIRECTORY>/\n+  \u251c\u2500 pointcloud_map.pcd\n+  \u2514\u2500 lanelet2_map.osm\n

    If your .osm or .pcd map file's name is different from these names, you need to update autoware.launch.xml:

      <!-- Map -->\n-  <arg name=\"lanelet2_map_file\" default=\"lanelet2_map.osm\" description=\"lanelet2 map file name\"/>\n+  <arg name=\"lanelet2_map_file\" default=\"<YOUR-LANELET-MAP-NAME>.osm\" description=\"lanelet2 map file name\"/>\n-  <arg name=\"pointcloud_map_file\" default=\"pointcloud_map.pcd\" description=\"pointcloud map file name\"/>\n+  <arg name=\"pointcloud_map_file\" default=\"<YOUR-POINTCLOUD-MAP-NAME>.pcd\" description=\"pointcloud map file name\"/>\n

    Now we are ready to launch the planning simulator:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=<YOUR-MAP-FOLDER-DIR> vehicle_model:=<YOUR-VEHICLE-MODEL> sensor_model:=<YOUR-SENSOR-KIT>\n

    Example for tutorial_vehicle:

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/Files/autoware_map/tutorial_map/ vehicle_model:=tutorial_vehicle sensor_model:=tutorial_vehicle_sensor_kit vehicle_id:=tutorial_vehicle\n
    1. Click 2D Pose Estimate button on rviz or press P and give a pose for initialization.
    2. Click Panels -> Add new panel, select TrafficLightPublishPanel, and then press OK.
    3. In TrafficLightPublishPanel, set the ID and color of the traffic light.
    4. Then, Click SET and PUBLISH button.
    5. Click 2D Goal Pose button on rviz or press G and give a pose for goal point.
    6. You can see the traffic light marker on the rviz screen if you set the traffic light color as RED.

    Traffic Light markers on rviz:

    Traffic light test on the created map.

    Video Demonstration:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/","title":"Available Open Source SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#available-open-source-slam","title":"Available Open Source SLAM","text":"

    This page provides the list of available open source Simultaneous Localization And Mapping (SLAM) implementation that can be used to generate a point cloud (.pcd) map file.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#selecting-which-implementation-to-use","title":"Selecting which implementation to use","text":"

    Lidar odometry drifts accumulatively over time, and there are solutions to that problem, such as graph optimization, loop closure and using a GPS sensor to decrease the accumulated drift error. Because of that, a SLAM algorithm should have a loop closure feature and graph optimization, and should use a GPS sensor. Additionally, some of the algorithms use an IMU sensor to add another factor to the graph for decreasing drift error. While some of the algorithms strictly require a 9-axis IMU sensor, others require only a 6-axis IMU sensor or do not use an IMU sensor at all. Before choosing an algorithm to create maps for Autoware, please consider these factors depending on your sensor setup or the expected quality of the generated map.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#tips","title":"Tips","text":"

    Commonly used open-source SLAM implementations are lidarslam-ros2 (LiDAR, IMU*) and LIO-SAM (LiDAR, IMU, GNSS). The required sensor data for each algorithm is specified in the parentheses, where an asterisk (*) indicates that such sensor data is optional. For supported LiDAR models, please check the GitHub repository of each algorithm. While these ROS 2-based SLAM implementations can be easily installed and used directly on the same machine that runs Autoware, it is important to note that they may not be as well-tested or as mature as ROS 1-based alternatives.

    The notable open-source SLAM implementations that are based on ROS 1 include hdl-graph-slam (LiDAR, IMU*, GNSS*), LeGO-LOAM (LiDAR, IMU*), LeGO-LOAM-BOR (LiDAR), and LIO-SAM (LiDAR, IMU, GNSS).

    Most of these algorithms already have a built-in loop-closure and pose graph optimization. However, if the built-in, automatic loop-closure fails or does not work correctly, you can use Interactive SLAM to adjust and optimize a pose graph manually.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/#list-of-third-party-slam-implementations","title":"List of Third Party SLAM Implementations","text":"Package Name Explanation Repository Link Loop Closure Sensors ROS Version Dependencies FAST-LIO-LC A computationally efficient and robust LiDAR-inertial odometry package with loop closure module and graph optimization https://github.com/yanliang-wang/FAST_LIO_LC &check; LidarIMUGPS [Optional] ROS 1 ROS MelodicPCL >= 1.8Eigen >= 3.3.4GTSAM >= 4.0.0 FAST_LIO_SLAM FAST_LIO_SLAM is the integration of FAST_LIO and SC-PGO which is scan context based loop detection and GTSAM based pose-graph optimization https://github.com/gisbi-kim/FAST_LIO_SLAM &check; LidarIMUGPS [Optional] ROS 1 PCL >= 1.8Eigen >= 3.3.4 FD-SLAM FD_SLAM is Feature&Distribution-based 3D LiDAR SLAM method based on Surface Representation Refinement. In this algorithm novel feature-based Lidar odometry used for fast scan-matching, and used a proposed UGICP method for keyframe matching https://github.com/SLAMWang/FD-SLAM &check; LidarIMU [Optional]GPS ROS 1 PCLg2oSuitesparse hdl_graph_slam An open source ROS package for real-time 6DOF SLAM using a 3D LIDAR. It is based on 3D Graph SLAM with NDT scan matching-based odometry estimation and loop detection. 
It also supports several graph constraints, such as GPS, IMU acceleration (gravity vector), IMU orientation (magnetic sensor), and floor plane (detected in a point cloud) https://github.com/koide3/hdl_graph_slam &check; LidarIMU [Optional]GPS [Optional] ROS 1 PCLg2oOpenMP IA-LIO-SAM IA_LIO_SLAM is created for data acquisition in unstructured environment and it is a framework for Intensity and Ambient Enhanced Lidar Inertial Odometry via Smoothing and Mapping that achieves highly accurate robot trajectories and mapping https://github.com/minwoo0611/IA_LIO_SAM &check; LidarIMUGPS ROS 1 GTSAM ISCLOAM ISCLOAM presents a robust loop closure detection approach by integrating both geometry and intensity information https://github.com/wh200720041/iscloam &check; Lidar ROS 1 Ubuntu 18.04ROS MelodicCeresPCLGTSAMOpenCV LeGO-LOAM-BOR LeGO-LOAM-BOR is improved version of the LeGO-LOAM by improving quality of the code, making it more readable and consistent. Also, performance is improved by converting processes to multi-threaded approach https://github.com/facontidavide/LeGO-LOAM-BOR &check; LidarIMU ROS 1 ROS MelodicPCLGTSAM LIO_SAM A framework that achieves highly accurate, real-time mobile robot trajectory estimation and map-building. It formulates lidar-inertial odometry atop a factor graph, allowing a multitude of relative and absolute measurements, including loop closures, to be incorporated from different sources as factors into the system https://github.com/TixiaoShan/LIO-SAM &check; LidarIMUGPS [Optional] ROS 1ROS 2 PCLGTSAM Optimized-SC-F-LOAM An improved version of F-LOAM and uses an adaptive threshold to further judge the loop closure detection results and reducing false loop closure detections. 
Also it uses feature point-based matching to calculate the constraints between a pair of loop closure frame point clouds and decreases time consumption of constructing loop frame constraints https://github.com/SlamCabbage/Optimized-SC-F-LOAM &check; Lidar ROS 1 PCLGTSAMCeres SC-A-LOAM A real-time LiDAR SLAM package that integrates A-LOAM and ScanContext. https://github.com/gisbi-kim/SC-A-LOAM &check; Lidar ROS 1 GTSAM >= 4.0 SC-LeGO-LOAM SC-LeGO-LOAM integrated LeGO-LOAM for lidar odometry and 2 different loop closure methods: ScanContext and Radius search based loop closure. While ScanContext is correcting large drifts, radius search based method is good for fine-stitching https://github.com/irapkaist/SC-LeGO-LOAM &check; LidarIMU ROS 1 PCLGTSAM"},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/","title":"FAST_LIO_LC","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#fast_lio_lc","title":"FAST_LIO_LC","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#what-is-fast_lio_lc","title":"What is FAST_LIO_LC?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/yanliang-wang/FAST_LIO_LC

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#dependencies","title":"Dependencies","text":"
      wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\n  cd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\n  cd ~/Downloads/gtsam-4.0.0-alpha2/\n  mkdir build && cd build\n  cmake ..\n  sudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#1-build","title":"1) Build","text":"
        mkdir -p ~/ws_fastlio_lc/src\n    cd ~/ws_fastlio_lc/src\n    git clone https://github.com/gisbi-kim/FAST_LIO_SLAM.git\n    git clone https://github.com/Livox-SDK/livox_ros_driver\n    cd ..\n    catkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#2-set-parameters","title":"2) Set parameters","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#3-run","title":"3) Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#other-examples","title":"Other Examples","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#example-dataset","title":"Example dataset","text":"

    Check original repository link for example dataset.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-lc/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/","title":"FAST_LIO_SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#fast_lio_slam","title":"FAST_LIO_SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#what-is-fast_lio_slam","title":"What is FAST_LIO_SLAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/gisbi-kim/FAST_LIO_SLAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#1-build","title":"1) Build","text":"
        mkdir -p ~/catkin_fastlio_slam/src\n    cd ~/catkin_fastlio_slam/src\n    git clone https://github.com/gisbi-kim/FAST_LIO_SLAM.git\n    git clone https://github.com/Livox-SDK/livox_ros_driver\n    cd ..\n    catkin_make\n    source devel/setup.bash\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#3-run","title":"3) Run","text":"
        # terminal 1: run FAST-LIO2\nroslaunch fast_lio mapping_ouster64.launch\n\n    # open the other terminal tab: run SC-PGO\ncd ~/catkin_fastlio_slam\n    source devel/setup.bash\n    roslaunch aloam_velodyne fastlio_ouster64.launch\n\n    # play bag file in the other terminal\nrosbag play xxx.bag -- clock --pause\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#other-examples","title":"Other Examples","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fast-lio-slam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/","title":"FD-SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#fd-slam","title":"FD-SLAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#what-is-fd-slam","title":"What is FD-SLAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#repository-information","title":"Repository Information","text":"

    This is an open source ROS package for real-time 6DOF SLAM using a 3D LIDAR.

    It is based on hdl_graph_slam and the steps to run our system are same with hdl-graph-slam.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/SLAMWang/FD-SLAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#dependencies","title":"Dependencies","text":"

    The following ROS packages are required:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/SLAMWang/FD-SLAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#2-services","title":"2) Services","text":"
    /hdl_graph_slam/dump  (hdl_graph_slam/DumpGraph)\n- save all the internal data (point clouds, floor coeffs, odoms, and pose graph) to a directory.\n\n/hdl_graph_slam/save_map (hdl_graph_slam/SaveMap)\n- save the generated map as a PCD file.\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#3-set-parameters","title":"3) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/fd-slam/#4-run","title":"4) Run","text":"
    source devel/setup.bash\nroslaunch hdl_graph_slam hdl_graph_slam_400_ours.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/","title":"hdl_graph_slam","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#hdl_graph_slam","title":"hdl_graph_slam","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#what-is-hdl_graph_slam","title":"What is hdl_graph_slam?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/koide3/hdl_graph_slam

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#dependencies","title":"Dependencies","text":"

    The following ROS packages are required:

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#1-build","title":"1) Build","text":"
    # for melodic\nsudo apt-get install ros-melodic-geodesy ros-melodic-pcl-ros ros-melodic-nmea-msgs ros-melodic-libg2o\ncd catkin_ws/src\ngit clone https://github.com/koide3/ndt_omp.git -b melodic\ngit clone https://github.com/SMRT-AIST/fast_gicp.git --recursive\ngit clone https://github.com/koide3/hdl_graph_slam\n\ncd .. && catkin_make -DCMAKE_BUILD_TYPE=Release\n\n# for noetic\nsudo apt-get install ros-noetic-geodesy ros-noetic-pcl-ros ros-noetic-nmea-msgs ros-noetic-libg2o\n\ncd catkin_ws/src\ngit clone https://github.com/koide3/ndt_omp.git\ngit clone https://github.com/SMRT-AIST/fast_gicp.git --recursive\ngit clone https://github.com/koide3/hdl_graph_slam\n\ncd .. && catkin_make -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#2-set-parameter","title":"2) Set parameter","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#3-run","title":"3) Run","text":"
    rosparam set use_sim_time true\nroslaunch hdl_graph_slam hdl_graph_slam_400.launch\n
    roscd hdl_graph_slam/rviz\nrviz -d hdl_graph_slam.rviz\n
    rosbag play --clock hdl_400.bag\n

    Save the generated map by:

    rosservice call /hdl_graph_slam/save_map \"resolution: 0.05\ndestination: '/full_path_directory/map.pcd'\"\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#example2-outdoor","title":"Example2 (Outdoor)","text":"

    Bag file (recorded in an outdoor environment):

    rosparam set use_sim_time true\nroslaunch hdl_graph_slam hdl_graph_slam_400.launch\n
    roscd hdl_graph_slam/rviz\nrviz -d hdl_graph_slam.rviz\n
    rosbag play --clock dataset.bag\n

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#papers","title":"Papers","text":"

    Kenji Koide, Jun Miura, and Emanuele Menegatti, A Portable 3D LIDAR-based System for Long-term and Wide-area People Behavior Measurement, Advanced Robotic Systems, 2019 [link].

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/hdl-graph-slam/#contact","title":"Contact","text":"

    Kenji Koide, k.koide@aist.go.jp, https://staff.aist.go.jp/k.koide

    [Active Intelligent Systems Laboratory, Toyohashi University of Technology, Japan] [Mobile Robotics Research Team, National Institute of Advanced Industrial Science and Technology (AIST), Japan]

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/","title":"IA-LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#ia-lio-sam","title":"IA-LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#what-is-ia-lio-sam","title":"What is IA-LIO-SAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/minwoo0611/IA_LIO_SAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#dependencies","title":"Dependencies","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#1-build","title":"1) Build","text":"
        mkdir -p ~/catkin_ia_lio/src\n    cd ~/catkin_ia_lio/src\n    git clone https://github.com/minwoo0611/IA_LIO_SAM\n    cd ..\n    catkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#2-set-parameters","title":"2) Set parameters","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#3-run","title":"3) Run","text":"
      # open new terminal: run IA_LIO\n  source devel/setup.bash\n  roslaunch lio_sam mapping_ouster64.launch\n\n  # play bag file in the other terminal\n  rosbag play RECORDED_BAG.bag --clock\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#sample-dataset-images","title":"Sample dataset images","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#example-dataset","title":"Example dataset","text":"

    Check original repo link for example dataset.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#paper","title":"Paper","text":"

    Thank you for citing IA-LIO-SAM(./config/doc/KRS-2021-17.pdf) if you use any of this code.

    Part of the code is adapted from LIO-SAM (IROS-2020).

    @inproceedings{legoloam2018shan,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/ia-lio-slam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/","title":"ISCLOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#iscloam","title":"ISCLOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#what-is-iscloam","title":"What is ISCLOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/wh200720041/iscloam

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#dependencies","title":"Dependencies","text":"

    For visualization purpose, this package uses hector trajectory sever, you may install the package by

    sudo apt-get install ros-melodic-hector-trajectory-server\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#build-and-run","title":"Build and Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#1-clone-repository","title":"1. Clone repository","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/wh200720041/iscloam.git\ncd ..\ncatkin_make -j1\nsource ~/catkin_ws/devel/setup.bash\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#2-set-parameter","title":"2. Set Parameter","text":"

    Change the bag location and sensor parameters on launch files.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#3-launch","title":"3. Launch","text":"
    roslaunch iscloam iscloam.launch\n

    if you would like to generate the map of environment at the same time, you can run

    roslaunch iscloam iscloam_mapping.launch\n

    Note that the global map can be very large, so it may takes a while to perform global optimization, some lag is expected between trajectory and map since they are running in separate thread. More CPU usage will happen when loop closure is identified.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#example-result","title":"Example Result","text":"

    Watch demo video at Video Link

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#ground-truth-comparison","title":"Ground Truth Comparison","text":"

    Green: ISCLOAM Red: Ground Truth

                      KITTI sequence 00                                  KITTI sequence 05\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#citation","title":"Citation","text":"

    If you use this work for your research, you may want to cite the paper below, your citation will be appreciated

    @inproceedings{wang2020intensity,\n  author={H. {Wang} and C. {Wang} and L. {Xie}},\n  booktitle={2020 IEEE International Conference on Robotics and Automation (ICRA)},\n  title={Intensity Scan Context: Coding Intensity and Geometry Relations for Loop Closure Detection},\n  year={2020},\n  volume={},\n  number={},\n  pages={2095-2101},\n  doi={10.1109/ICRA40945.2020.9196764}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/iscloam/#acknowledgements","title":"Acknowledgements","text":"

    Thanks for A-LOAM and LOAM(J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time) and LOAM_NOTED.

    Author: Wang Han, Nanyang Technological University, Singapore

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/","title":"LeGO-LOAM-BOR","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#lego-loam-bor","title":"LeGO-LOAM-BOR","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#what-is-lego-loam-bor","title":"What is LeGO-LOAM-BOR?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/facontidavide/LeGO-LOAM-BOR

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/facontidavide/LeGO-LOAM-BOR.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#3-run","title":"3) Run","text":"
    source devel/setup.bash\nroslaunch lego_loam_bor run.launch rosbag:=/path/to/your/rosbag lidar_topic:=/velodyne_points\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lego-loam-bor/#cite-lego-loam","title":"Cite LeGO-LOAM","text":"

    Thank you for citing our LeGO-LOAM paper if you use any of this code:

    @inproceedings{legoloam2018,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Tixiao Shan and Brendan Englot},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/","title":"LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#lio-sam","title":"LIO-SAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#what-is-lio-sam","title":"What is LIO-SAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/TixiaoShan/LIO-SAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#required-sensors","title":"Required Sensors","text":"

    *Robosense lidars aren't supported officially, but their Helios series can be used as Velodyne lidars.

    The system architecture of LIO-SAM method described in the following diagram, please look at the official repository for getting more information.

    System Architecture of LIO-SAM

    We are using Robosense Helios 5515 and CLAP B7 sensor on tutorial_vehicle, so we will use these sensors for running LIO-SAM.

    Additionally, LIO-SAM tested with Applanix POS LVX and Hesai Pandar XT32 sensor setup. Some additional information according to the sensors will be provided in this page.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#ros-compatibility","title":"ROS Compatibility","text":"

    Since Autoware uses ROS 2 Humble currently, we will continue with ROS 2 version of LIO-SAM.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#dependencies","title":"Dependencies","text":"

    ROS 2 dependencies:

    To install these dependencies, you can use this bash command in your terminal:

    sudo apt install ros-humble-perception-pcl \\\nros-humble-pcl-msgs \\\nros-humble-vision-opencv \\\nros-humble-xacro\n

    Other dependencies:

    To install the gtsam, you can use this bash command in your terminal:

      # Add GTSAM-PPA\nsudo add-apt-repository ppa:borglab/gtsam-release-4.1\n  sudo apt install libgtsam-dev libgtsam-unstable-dev\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#1-installation","title":"1) Installation","text":"

    In order to use and build LIO-SAM, we will create workspace for LIO-SAM:

        mkdir -p ~/lio-sam-ws/src\n    cd ~/lio-sam-ws/src\n    git clone -b ros2 https://github.com/TixiaoShan/LIO-SAM.git\n    cd ..\n    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#2-settings","title":"2) Settings","text":"

    After the building of LIO-SAM, we need to record ROS 2 Bag file with including necessary topics for LIO-SAM. The necessary topics are described in the config file on LIO-SAM.

    ROS 2 Bag example for LIO-SAM with Robosense Helios and CLAP B7
    Files:             map_bag_13_09_0.db3\nBag size:          38.4 GiB\nStorage id:        sqlite3\nDuration:          3295.326s\nStart:             Sep 13 2023 16:40:23.165 (1694612423.165)\nEnd:               Sep 13 2023 17:35:18.492 (1694615718.492)\nMessages:          1627025\nTopic information: Topic: /sensing/gnss/clap/ros/imu | Type: sensor_msgs/msg/Imu | Count: 329535 | Serialization Format: cdr\nTopic: /sensing/gnss/clap/ros/odometry | Type: nav_msgs/msg/Odometry | Count: 329533 | Serialization Format: cdr\nTopic: /sensing/lidar/top/pointcloud_raw | Type: sensor_msgs/msg/PointCloud2 | Count: 32953 | Serialization Format: cdr\n

    Note: We use use_odometry as true at clap_b7_driver for publishing GPS odometry topic from navsatfix.

    Please set topics and sensor settings on lio_sam/config/params.yaml. Here are some example modifications for out tutorial_vehicle.

    -   pointCloudTopic: \"/points\"\n+   pointCloudTopic: \"/sensing/lidar/top/pointcloud_raw\"\n-   imuTopic: \"/imu/data\"\n+   imuTopic: \"/sensing/gnss/clap/ros/imu\"\n   odomTopic: \"odometry/imu\"\n-   gpsTopic: \"odometry/gpsz\"\n+   gpsTopic: \"/sensing/gnss/clap/ros/odometry\"\n

    Since we will use GPS information with Autoware, so we need to enable useImuHeadingInitialization parameter.

    -   useImuHeadingInitialization: false\n+   useImuHeadingInitialization: true\n-   useGpsElevation: false\n+   useGpsElevation: true\n

    We will update sensor settings also. Since Robosense Lidars aren't officially supported, we will set our 32-channel Robosense Helios 5515 lidar as Velodyne:

    -   sensor: ouster\n+   sensor: velodyne\n-   N_SCAN: 64\n+   N_SCAN: 32\n-   Horizon_SCAN: 512\n+   Horizon_SCAN: 1800\n

    After that, we will update extrinsic transformations between Robosense Lidar and CLAP B7 GNSS/INS (IMU) system.

    -   extrinsicTrans:  [ 0.0,  0.0,  0.0 ]\n+   extrinsicTrans:  [-0.91, 0.0, -1.71]\n-   extrinsicRot:    [-1.0,  0.0,  0.0,\n-                      0.0,  1.0,  0.0,\n-                      0.0,  0.0, -1.0 ]\n+   extrinsicRot:    [1.0,  0.0,  0.0,\n+                     0.0,  1.0,  0.0,\n+                     0.0,  0.0, 1.0 ]\n-   extrinsicRPY: [ 0.0,  1.0,  0.0,\n-                  -1.0,  0.0,  0.0,\n-                   0.0,  0.0,  1.0 ]\n+   extrinsicRPY: [ 1.0,  0.0,  0.0,\n+                   0.0,  1.0,  0.0,\n+                   0.0,  0.0,  1.0 ]\n

    Warning

    The mapping direction is towards to the going direction in the real world. If LiDAR sensor is backwards, according to the direction you are moving, then you need to change the extrinsicRot too. Unless the IMU tries to go in the wrong direction, and it may occur problems.

    For example, in our Applanix POS LVX and Hesai Pandar XT32 setup, IMU direction was towards to the going direction and LiDAR direction has 180 degree difference in Z-axis according to the IMU direction. In other words, they were facing back to each other. The tool may need a transformation for IMU for that.

    -   extrinsicRot:    [-1.0,  0.0,  0.0,\n-                      0.0,  1.0,  0.0,\n-                      0.0,  0.0, -1.0 ]\n+   extrinsicRot:    [-1.0,  0.0,  0.0,\n+                     0.0,  -1.0,  0.0,\n+                     0.0,   0.0,  1.0 ]\n-   extrinsicRPY: [ 0.0,  1.0,  0.0,\n-                  -1.0,  0.0,  0.0,\n-                   0.0,  0.0,  1.0 ]\n+   extrinsicRPY: [ -1.0,  0.0,  0.0,\n+                    0.0, -1.0,  0.0,\n+                    0.0,  0.0,  1.0 ]\n

    Transform Visualization of Applanix POS LVX and Hesai Pandar XT32 in RViz

    Now, we are ready to create a map for Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#3-usage","title":"3) Usage","text":"

    If you are set configurations and create bag file for LIO-SAM, you can launch LIO-SAM with:

    ros2 launch lio_sam run.launch.py\n

    The rviz2 screen will be open, then you can play your bag file:

    ros2 bag play <YOUR-BAG-FILE>\n

    If the mapping process is finished, you can save map with calling this service:

    ros2 service call /lio_sam/save_map lio_sam/srv/SaveMap \"{resolution: 0.2, destination: <YOUR-MAP-DIRECTORY>}\"\n

    Here is the video for demonstration of LIO-SAM mapping in our campus environment:

    The output map format is local UTM, we will change local UTM map to MGRS format for tutorial_vehicle. Also, if you want change UTM to MGRS for autoware, please follow convert-utm-to-mgrs-map page.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#example-result","title":"Example Result","text":"Sample Map Output for our Campus Environment"},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#paper","title":"Paper","text":"

    Thank you for citing LIO-SAM (IROS-2020) if you use any of this code.

    @inproceedings{liosam2020shan,\n  title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},\n  author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus Daniela},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={5135-5142},\n  year={2020},\n  organization={IEEE}\n}\n

    Part of the code is adapted from LeGO-LOAM.

    @inproceedings{legoloam2018shan,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/lio-sam/#acknowledgements","title":"Acknowledgements","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/","title":"Optimized-SC-F-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#optimized-sc-f-loam","title":"Optimized-SC-F-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#what-is-optimized-sc-f-loam","title":"What is Optimized-SC-F-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/SlamCabbage/Optimized-SC-F-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#dependencies","title":"Dependencies","text":"
    sudo apt-get install ros-noetic-hector-trajectory-server\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/SlamCabbage/Optimized-SC-F-LOAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#2-create-message-file","title":"2) Create message file","text":"

    In this folder, Ground Truth information, optimized pose information, F-LOAM pose information and time information are stored

    mkdir -p ~/message/Scans\n\nChange line 383 in the laserLoopOptimizationNode.cpp to your own \"message\" folder path\n

    (Do not forget to rebuild your package)

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#3-set-parameters","title":"3) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#4-run","title":"4) Run","text":"
    source devel/setup.bash\nroslaunch optimized_sc_f_loam optimized_sc_f_loam_mapping.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#results-on-kitti-sequence-00-and-sequence-05","title":"Results on KITTI Sequence 00 and Sequence 05","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#comparison-of-trajectories-on-kitti-dataset","title":"Comparison of trajectories on KITTI dataset","text":"

    Test on KITTI sequence You can download the sequence 00 and 05 datasets from the KITTI official website and convert them into bag files using the kitti2bag open source method.

    00: 2011_10_03_drive_0027 000000 004540

    05: 2011_09_30_drive_0018 000000 002760

    See the link: https://github.com/ethz-asl/kitti_to_rosbag

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#acknowledgements","title":"Acknowledgements","text":"

    Thanks for SC-A-LOAM(Scan context: Egocentric spatial descriptor for place recognition within 3d point cloud map) and F-LOAM(F-LOAM : Fast LiDAR Odometry and Mapping).

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/optimized-sc-f-loam/#citation","title":"Citation","text":"
    @misc{https://doi.org/10.48550/arxiv.2204.04932,\n  doi = {10.48550/ARXIV.2204.04932},\n\n  url = {https://arxiv.org/abs/2204.04932},\n\n  author = {Liao, Lizhou and Fu, Chunyun and Feng, Binbin and Su, Tian},\n\n  keywords = {Robotics (cs.RO), FOS: Computer and information sciences, FOS: Computer and information sciences},\n\n  title = {Optimized SC-F-LOAM: Optimized Fast LiDAR Odometry and Mapping Using Scan Context},\n\n  publisher = {arXiv},\n\n  year = {2022},\n\n  copyright = {arXiv.org perpetual, non-exclusive license}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/","title":"SC-A-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#sc-a-loam","title":"SC-A-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#what-is-sc-a-loam","title":"What is SC-A-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/gisbi-kim/SC-A-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#prerequisites-dependencies","title":"Prerequisites (dependencies)","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#1-build","title":"1) Build","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#2-set-parameters","title":"2) Set parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#scan-context-parameters","title":"Scan Context parameters","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#3-run","title":"3) Run","text":"
    roslaunch aloam_velodyne aloam_mulran.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#4-saving-as-pcd-file","title":"4) Saving as PCD file","text":"
      rosrun pcl_ros pointcloud_to_pcd input:=/aft_pgo_map\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#example-results","title":"Example Results","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#riverside-01-mulran-dataset","title":"Riverside 01, MulRan dataset","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#kitti-05","title":"KITTI 05","text":" "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-a-loam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/","title":"SC-LeGO-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#sc-lego-loam","title":"SC-LeGO-LOAM","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#what-is-sc-lego-loam","title":"What is SC-LeGO-LOAM?","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#repository-information","title":"Repository Information","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#original-repository-link","title":"Original Repository link","text":"

    https://github.com/irapkaist/SC-LeGO-LOAM

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#required-sensors","title":"Required Sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#ros-compatibility","title":"ROS Compatibility","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#dependencies","title":"Dependencies","text":"
    wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.0-alpha2.zip\ncd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\ncd ~/Downloads/gtsam-4.0.0-alpha2/\nmkdir build && cd build\ncmake ..\nsudo make install\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#build-run","title":"Build & Run","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#1-build","title":"1) Build","text":"
    cd ~/catkin_ws/src\ngit clone https://github.com/irapkaist/SC-LeGO-LOAM.git\ncd ..\ncatkin_make\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#2-set-parameters","title":"2) Set parameters","text":"

    (Do not forget to rebuild after setting parameters.)

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#3-run","title":"3) Run","text":"
    source devel/setup.bash\nroslaunch lego_loam run.launch\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#example-result","title":"Example Result","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#other-examples","title":"Other Examples","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#mulran-dataset","title":"MulRan dataset","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#cite-sc-lego-loam","title":"Cite SC-LeGO-LOAM","text":"
    @INPROCEEDINGS { gkim-2018-iros,\n  author = {Kim, Giseop and Kim, Ayoung},\n  title = { Scan Context: Egocentric Spatial Descriptor for Place Recognition within {3D} Point Cloud Map },\n  booktitle = { Proceedings of the IEEE/RSJ International Conference on Intelligent Robots and Systems },\n  year = { 2018 },\n  month = { Oct. },\n  address = { Madrid }\n}\n

    and

    @inproceedings{legoloam2018,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n
    "},{"location":"how-to-guides/integrating-autoware/creating-maps/open-source-slam/sc-lego-loam/#contact","title":"Contact","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/","title":"Pointcloud map downsampling","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#pointcloud-map-downsampling","title":"Pointcloud map downsampling","text":""},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#overview","title":"Overview","text":"

    In some cases, for example, when your created point cloud map is either too dense or too large (i.e., exceeding 300 MB), you may want to downsample it for improved computational and memory efficiency. Also, you can consider using dynamic map loading with partial loading, please check map_loader package for more information.

    At the tutorial_vehicle implementation we will use the whole map, so we will downsample it using CloudCompare.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#installing-cloudcompare","title":"Installing CloudCompare","text":"

    You can install it by snap:

    sudo snap install cloudcompare\n

    Please check the official page for installation options.

    "},{"location":"how-to-guides/integrating-autoware/creating-maps/pointcloud-map-downsampling/#downsampling-a-pointcloud-map","title":"Downsampling a pointcloud map","text":"

    There are three subsampling methods on CloudCompare, we are using Space method for subsampling, but you can use other methods if you want.

    1. Please open CloudCompare and drag your pointcloud here, then you can select your pointcloud map by just clicking on the map at the DB tree panel.
    2. Then you can click subsample button on the top panel.

    CloudCompare
    1. Please select on your subsample method, we will use space for tutorial_vehicle.
    2. Then you can select options. For example, we need to determine the minimum space between points. (Please be careful in this section; subsampling depends on your map size, computer performance, etc.) We will set this value to 0.2 for tutorial_vehicle's map.

    Pointcloud subsampling

    Select your downsampled pointcloud

    Now, you can save your downsampled pointcloud with ctrl + s or you can click the save button from the File bar. Then, this pointcloud can be used by Autoware.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/","title":"Calibrating your sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#calibrating-your-sensors","title":"Calibrating your sensors","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#overview","title":"Overview","text":"

    Autoware expects to have multiple sensors attached to the vehicle as input to perception, localization, and planning stack. These sensors must be calibrated correctly and their positions must be defined using either urdf files (as in sample_sensor_kit) or as tf launch files.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#camera-calibration","title":"Camera calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#intrinsic-calibration","title":"Intrinsic Calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-lidar-calibration","title":"Lidar-lidar calibration","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-lidar-calibration-tool-from-autocore","title":"Lidar-Lidar Calibration tool from Autocore","text":"

    LL-Calib on GitHub, provided by AutoCore, is a lightweight toolkit for online/offline 3D LiDAR to LiDAR calibration. It's based on local mapping and \"GICP\" method to derive the relation between main and sub lidar. Information on how to use the tool, troubleshooting tips and example rosbags can be found at the above link.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-camera-calibration","title":"Lidar-camera calibration","text":"

    Developed by MathWorks, The Lidar Camera Calibrator app enables you to interactively estimate the rigid transformation between a lidar sensor and a camera.

    https://ww2.mathworks.cn/help/lidar/ug/get-started-lidar-camera-calibrator.html

    SensorsCalibration toolbox v0.1: One more open source method for Lidar-camera calibration. This is a project for LiDAR to camera calibration, including automatic calibration and manual calibration.

    https://github.com/PJLab-ADG/SensorsCalibration/blob/master/lidar2camera/README.md

    Developed by AutoCore, an easy-to-use lightweight toolkit for Lidar-camera-calibration is proposed. Only in three steps, a fully automatic calibration will be done.

    https://github.com/autocore-ai/calibration_tools/tree/main/lidar-cam-calib-related

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/calibrating-sensors/#lidar-imu-calibration","title":"Lidar-IMU calibration","text":"

    Developed by APRIL Lab at Zhejiang University in China, the LI-Calib calibration tool is a toolkit for calibrating the 6DoF rigid transformation and the time offset between a 3D LiDAR and an IMU, based on continuous-time batch optimization. IMU-based cost and LiDAR point-to-surfel (surfel = surface element) distance are minimized jointly, which renders the calibration problem well-constrained in general scenarios.

    AutoCore has forked the original LI-Calib tool and overwritten the Lidar input for more general usage. Information on how to use the tool, troubleshooting tips and example rosbags can be found at the LI-Calib fork on GitHub.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/","title":"Creating vehicle and sensor description","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#creating-vehicle-and-sensor-description","title":"Creating vehicle and sensor description","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#introduction","title":"Introduction","text":"

    This page introduces the following topics.

    1. YOUR_VEHICLE_description
    2. YOUR_SENSOR_KIT_description
    3. individual_parameter
    4. YOUR_VEHICLE_launch
    5. YOUR_SENSOR_KIT_launch
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-your_vehicle_description","title":"1. YOUR_VEHICLE_description","text":"

    In YOUR_VEHICLE_description, the following configurations are set:

    1. vehicle_info.param.yaml (must be changed)
    2. mesh file (*.dae)
    3. mirror.param.yaml (must be changed)
    4. simulator_model.param.yaml
    5. vehicle.xacro
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-vehicle_infoparamyaml","title":"1. vehicle_info.param.yaml","text":"

    Defines the vehicle dimensions. For more details on each parameter, please click here.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-mesh-file","title":"2. mesh file","text":"

    A 3D model file used for visualization in rviz.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-mirrorparamyaml","title":"3. mirror.param.yaml","text":"

    Set according to the vehicle dimensions. Used in the crop-box-filter of PointCloudPreprocessor.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-simulator_modelparamyaml","title":"4. simulator_model.param.yaml","text":"

    Configuration file for the simulator environment.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-vehiclexacro","title":"5. vehicle.xacro","text":"

    The entry point file that defines the entire URDF of the vehicle. It refers to sensors.xacro, which specifies the sensor mounting positions.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-your_sensor_kit_description","title":"2. YOUR_SENSOR_KIT_description","text":"

    In sensor_kit_description, the following files are configured:

    1. sensors.xacro (must be changed)
    2. sensor_kit.xacro (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensorsxacro","title":"1. sensors.xacro","text":"

    Resolves the positions of sensors with base_link as the parent frame and defines the positions and orientations based on sensors_calibration.yaml in individual_params.

    In Autoware, <YOUR_SENSOR_KIT_description>/config/sensors_calibration.yaml is not used.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#about-sensor_kit_base_link","title":"About sensor_kit_base_link","text":"

    A sensor_kit refers to a subset that includes multiple sensors, and sensor_kit_base_link is the name of its frame. The positions and orientations within the kit are defined in sensor_kit.xacro.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-sensor_kitxacro","title":"2. sensor_kit.xacro","text":"

    Resolves the positions of sensors with sensor_kit_base_link as the parent and defines the positions and orientations based on sensor_kit_calibration.yaml in individual_params.

    In Autoware, <YOUR_SENSOR_KIT_description>/config/sensor_kit_calibration.yaml is not used.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-individual_parameter","title":"3. individual_parameter","text":"

    The individual_parameter is where parameters referenced by sensors.xacro and sensor_kit.xacro are stored. As the name implies, it is intended to manage parameters for multiple individual instances.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#introduction-to-various-parameters","title":"Introduction to Various Parameters","text":"
    1. sensors_calibration.yaml (must be changed)
    2. sensor_kit_calibration.yaml (must be changed)
    3. imu_corrector.param.yaml
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensors_calibrationyaml","title":"1. sensors_calibration.yaml","text":"

    A file that defines the mounting positions and orientations of sensors with base_link as the parent frame.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-sensor_kit_calibrationyaml","title":"2. sensor_kit_calibration.yaml","text":"

    A file that defines the mounting positions and orientations of sensors with sensor_kit_base_link as the parent frame.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-imu_correctorparamyaml","title":"3. imu_corrector.param.yaml","text":"

    A file used by imu_corrector.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-folder-structure","title":"4. Folder Structure","text":"

    Below is the default directory structure.

    individual_params/\n\u2514\u2500 config/\n     \u2514\u2500 default/\n          \u2514\u2500 sample_sensor_kit/\n               \u251c\u2500 imu_corrector.param.yaml\n               \u251c\u2500 sensor_kit_calibration.yaml\n               \u2514\u2500 sensors_calibration.yaml\n

    Copy and create a folder based on your YOUR_SENSOR_KIT name.

    individual_params/\n\u2514\u2500 config/\n     \u2514\u2500 default/\n-         \u2514\u2500 sample_sensor_kit/\n+         \u2514\u2500 <YOUR_SENSOR_KIT>/\n              \u251c\u2500 imu_corrector.param.yaml\n               \u251c\u2500 sensor_kit_calibration.yaml\n               \u2514\u2500 sensors_calibration.yaml\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#41-sample-usage","title":"4.1 Sample Usage","text":"

    Here is an example of managing parameters for multiple instances. Add a <vehicle_id> directory and switch parameters using options at startup.

    # example1 (do not set vehicle_id)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_mode:=<your_vehicle_model>\n# example2 (set vehicle_id as VEHICLE_1)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_mode:=<your_vehicle_model> vehicle_id:=VEHICLE_1\n# example3 (set vehicle_id as VEHICLE_2)\n$ ros2 launch autoware_launch autoware.launch.xml sensor_model:=<YOUR_SENSOR_KIT> vehicle_mode:=<your_vehicle_model> vehicle_id:=VEHICLE_2\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#sample-directory-structure","title":"Sample Directory Structure","text":"
    individual_params/\n\u2514\u2500 config/\n     \u251c\u2500 default/\n     \u2502   \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example1\n     \u2502        \u251c\u2500 imu_corrector.param.yaml\n     \u2502        \u251c\u2500 sensor_kit_calibration.yaml\n     \u2502        \u2514\u2500 sensors_calibration.yaml\n+    \u251c\u2500 VEHICLE_1/\n+    \u2502   \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example2\n+    \u2502        \u251c\u2500 imu_corrector.param.yaml\n+    \u2502        \u251c\u2500 sensor_kit_calibration.yaml\n+    \u2502        \u2514\u2500 sensors_calibration.yaml\n+    \u2514\u2500 VEHICLE_2/\n+         \u2514\u2500 <YOUR_SENSOR_KIT>/                  # example3\n+              \u251c\u2500 imu_corrector.param.yaml\n+              \u251c\u2500 sensor_kit_calibration.yaml\n+              \u2514\u2500 sensors_calibration.yaml\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4your_vehicle_launch","title":"4.YOUR_VEHICLE_launch","text":"

    YOUR_VEHICLE_launch is where the launch file for starting the drive system devices is stored.

    1. vehicle_interface.launch.xml (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-vehicle_interfacelaunchxml","title":"1. vehicle_interface.launch.xml","text":"

    vehicle_interface.launch.xml is the launch file related to the drive system. Please modify it according to the configuration of your vehicle's drive system.

    If you are operating multiple vehicles, use the vehicle_id to switch to the corresponding configuration for each vehicle.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-your_sensor_kit_launch","title":"5. YOUR_SENSOR_KIT_launch","text":"

    YOUR_SENSOR_KIT_launch is where the launch files related to sensor startup are stored.

    1. sensing.launch.xml (must be changed)
    2. lidar.launch.xml (must be changed)
    3. camera.launch.xml
    4. imu.launch.xml (must be changed)
    5. gnss.launch.xml
    6. pointcloud_preprocessor.launch.py (must be changed)
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#1-sensinglaunchxml","title":"1. sensing.launch.xml","text":"

    sensing.launch.xml is the entry point that calls the launch files for all sensors. Modify it according to your sensor configuration.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#2-lidarlaunchxml","title":"2. lidar.launch.xml","text":"

    lidar.launch.xml is the launch file related to starting the LiDAR driver. Modify it according to your LiDAR configuration.

    In Autoware's initial configuration, it assumes converting the acquired data using pointcloud_preprocessor.launch.py.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#example-configuration-items","title":"Example Configuration Items","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#3-cameralaunchxml","title":"3. camera.launch.xml","text":"

    camera.launch.xml is the launch file related to starting the camera driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#4-imulaunchxml","title":"4. imu.launch.xml","text":"

    imu.launch.xml is the launch file related to starting the IMU driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#5-gnsslaunchxml","title":"5. gnss.launch.xml","text":"

    gnss.launch.xml is the launch file related to starting the GNSS driver.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-and-sensor-description/creating-vehicle-and-sensor-description/#6-pointcloud_preprocessorlaunchpy","title":"6. pointcloud_preprocessor.launch.py","text":"

    pointcloud_preprocessor.launch.py is the launch file to convert the raw sensor data. For more information, please click here.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/","title":"Creating a vehicle interface for an Ackermann kinematic model","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#creating-a-vehicle-interface-for-an-ackermann-kinematic-model","title":"Creating a vehicle interface for an Ackermann kinematic model","text":"

    This page introduces a module vehicle interface and explains how to implement it.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#what-is-a-vehicle-interface","title":"What is a vehicle interface","text":"

    Vehicle interface is an interface that connects the control commands and your vehicle's control device. Autoware publishes control commands such as:

    Then, the vehicle interface converts these commands into actuation such as:

    So think of the vehicle interface as a module that runs the vehicle's control device to realize the input commands provided by Autoware.

    An example of inputs and outputs for vehicle interface

    This page gives a brief explanation of how to implement your vehicle interface, but you can find further information about the vehicle interface in the \"design\" page.

    Note that there is no package named \"vehicle interface\" prepared in Autoware. It is a necessary package to actuate your vehicle, but you have to create one by yourself since it is very specific to your vehicle's control device.

    For example, if you are using a by-wire kit PACMod, a vehicle interface named pacmod_interface published by TIER IV, Inc. is available. However, if you have constructed something original and haven't found an open source vehicle interface applicable, you have to implement your own vehicle interface from scratch.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#how-to-implement-a-vehicle-interface","title":"How to implement a vehicle interface","text":"

    The following instructions describe how to create a vehicle interface.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#1-create-a-directory-for-vehicle-interface","title":"1. Create a directory for vehicle interface","text":"

    It is recommended to create your vehicle interface at <your-autoware-dir>/src/vehicle/external

    cd <your-autoware-dir>/src/vehicle/external\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#2-install-or-implement-your-own-vehicle-interface","title":"2. Install or implement your own vehicle interface","text":"

    If there is an already complete vehicle interface package (like pacmod_interface), you can install it to your environment. If not, you have to implement your own vehicle interface by yourself. Let's create a new package by ros2 pkg create. The following example will show you how to create a vehicle interface package named my_vehicle_interface.

    ros2 pkg create --build-type ament_cmake my_vehicle_interface\n

    Then, you should write your implementation of vehicle interface in my_vehicle_interface/src. Again, since this implementation is so specific to the control device of your vehicle, it is beyond the scope of this document to describe how to implement your vehicle interface in detail. Here are some factors that might be considered.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#3-prepare-a-launch-file","title":"3. Prepare a launch file","text":"

    After you implement your vehicle interface or you want to debug it by launching it, create a launch file of your vehicle interface, and include it to vehicle_interface.launch.xml.

    Do not get confused. First, you need to create a launch file for your own vehicle interface module (like my_vehicle_interface.launch.xml) and then include that to vehicle_interface.launch.xml which exists in another directory. Here are the details.

    1. Add a launch directory in the my_vehicle_interface directory, and create a launch file of your own vehicle interface in it. Take a look at Creating a launch file in the ROS 2 documentation.

    2. Next, go to <your-autoware-dir>/src/vehicle, copy the directory /sample_vehicle_launch/, and paste it to the same place (which means it should be lined up with external and sample_vehicle_launch).

    3. You have to rename each \"sample_vehicle\" to something else. For example, if you want to rename \"sample_vehicle\" to \"my_vehicle_name\", you need to change the following. Note that you must keep the \"_launch\" and \"_description\" parts.

      • Rename the directories
        • sample_vehicle_launch \u2192 my_vehicle_name_launch
        • my_vehicle_name_launch/sample_vehicle_launch \u2192 my_vehicle_name_launch/my_vehicle_name_launch
        • my_vehicle_name_launch/sample_vehicle_description \u2192 my_vehicle_name_launch/my_vehicle_name_description
      • After you rename your directories, rename each \"sample_vehicle\" to \"my_vehicle_name\" in the source code.
        • my_vehicle_name_description/CMakeLists.txt
        • my_vehicle_name_description/package.xml
        • my_vehicle_name_description/urdf/vehicle.xacro (there are two parts)
        • my_vehicle_name_launch/CMakeLists.txt
        • my_vehicle_name_launch/package.xml
        • README.md
    4. Include your launch file to my_vehicle_name_launch/my_vehicle_name_launch/launch/vehicle_interface.launch.xml by opening it and add the include terms like below.

    vehicle_interface.launch.xml
    <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<launch>\n<arg name=\"vehicle_id\" default=\"$(env VEHICLE_ID default)\"/>\n\n<include file=\"$(find-pkg-share my_vehicle_interface)/launch/my_vehicle_interface.launch.xml\">\n</include>\n</launch>\n

    Finally, your directory structure may look like below. Most of the files are omitted for clarity, but the files shown here need modification as described in the previous and current steps.

    <your-autoware-dir>/\n\u2514\u2500 src/\n    \u2514\u2500 vehicle/\n        \u251c\u2500 external/\n+       \u2502   \u2514\u2500 my_vehicle_interface/\n+       \u2502       \u251c\u2500 src/\n+       \u2502       \u2514\u2500 launch/\n+       \u2502            \u2514\u2500 my_vehicle_interface.launch.xml\n       \u251c\u2500 sample_vehicle_launch/\n+       \u2514\u2500 my_vehicle_name_launch/ (COPIED FROM sample_vehicle_launch)\n+           \u251c\u2500 my_vehicle_name_launch/\n+           \u2502  \u251c\u2500 launch/\n+           \u2502  \u2502  \u2514\u2500 vehicle_interface.launch.xml\n+           \u2502  \u251c\u2500 CMakeLists.txt\n+           \u2502  \u2514\u2500 package.xml\n+           \u251c\u2500 my_vehicle_name_description/\n+           \u2502  \u251c\u2500 config/\n+           \u2502  \u251c\u2500 mesh/\n+           \u2502  \u251c\u2500 urdf/\n+           \u2502  \u2502  \u2514\u2500 vehicle.xacro\n+           \u2502  \u251c\u2500 CMakeLists.txt\n+           \u2502  \u2514\u2500 package.xml\n+           \u2514\u2500 README.md\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#4-build-the-vehicle-interface-package-and-the-launch-package","title":"4. Build the vehicle interface package and the launch package","text":"

    Build the three packages my_vehicle_interface, my_vehicle_name_launch and my_vehicle_name_description with colcon build, or simply build the entire Autoware workspace if you have modified other packages as well.

    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release --packages-select my_vehicle_interface my_vehicle_name_launch my_vehicle_name_description\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#5-when-you-launch-autoware","title":"5. When you launch Autoware","text":"

    Finally, you are done implementing your vehicle interface module! Note that you need to launch Autoware with the proper vehicle_model option, as in the example below. This example launches the planning simulator.

    ros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-planning vehicle_model:=my_vehicle_name sensor_model:=sample_sensor_kit\n
    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#tips","title":"Tips","text":"

    There are some tips that may help you.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#ackermann-kinematic-model","title":"Ackermann kinematic model","text":"

    Autoware now supports control inputs for vehicles based on an Ackermann kinematic model. This section briefly introduces the concept of the Ackermann kinematic model and explains how Autoware controls it.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#geometry","title":"Geometry","text":"

    The basic style of the Ackermann kinematic model has four wheels with an Ackermann link on the front, and it is powered by the rear wheels. The key point of the Ackermann kinematic model is that the axes of all wheels intersect at the same point, which means all wheels trace circular trajectories with different radii but a common center point (see the figure below). Therefore, this model has the great advantage of minimizing wheel slippage, which prevents the tires from wearing out quickly.

    In general, the Ackermann kinematic model accepts the longitudinal speed \\(v\\) and the steering angle \\(\\phi\\) as inputs. In Autoware, \\(\\phi\\) is positive when steered counterclockwise, so the steering angle in the figure below is actually negative.

    The basic style of an Ackermann kinematic model. The left figure shows a vehicle facing straight forward, while the right figure shows a vehicle steering to the right."},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/creating-a-vehicle-interface-for-an-ackermann-kinematic-model/#control","title":"Control","text":"

    Autoware publishes a ROS 2 topic named control_cmd from several types of publishers. A control_cmd topic is an AckermannControlCommand type message that contains

    AckermannControlCommand
      builtin_interfaces/Time stamp\n  autoware_auto_control_msgs/AckermannLateralCommand lateral\n  autoware_auto_control_msgs/LongitudinalCommand longitudinal\n

    where,

    AckermannLateralCommand
      builtin_interfaces/Time stamp\n  float32 steering_tire_angle\n  float32 steering_tire_rotation_rate\n
    LongitudinalCommand
      builtin_interfaces/Time stamp\n  float32 speed\n  float32 acceleration\n  float32 jerk\n

    See the AckermannLateralCommand.idl and LongitudinalCommand.idl for details.

    The vehicle interface should realize these control commands through your vehicle's control device.

    Moreover, Autoware also provides brake commands, light commands, and more (see vehicle interface design), so the vehicle interface module should be applicable to these commands as long as there are devices available to handle them.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/","title":"Customizing for differential drive vehicle","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#customizing-for-differential-drive-vehicle","title":"Customizing for differential drive vehicle","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#1-introduction","title":"1. Introduction","text":"

    Currently, Autoware assumes that vehicles use an Ackermann kinematic model with Ackermann steering. Thus, Autoware adopts the Ackermann command format for the Control module's output (see the AckermannDrive ROS message definition for an overview of Ackermann commands, and the AckermannControlCommand struct used in Autoware for more details).

    However, it is possible to integrate Autoware with a vehicle that follows a differential drive kinematic model, as commonly used by small mobile robots.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#2-procedure","title":"2. Procedure","text":"

    One simple way of using Autoware with a differential drive vehicle is to create a vehicle_interface package that translates Ackermann commands to differential drive commands. Here are two points that you need to consider:

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#21-create-a-vehicle_interface-package-for-differential-drive-vehicle","title":"2.1 Create a vehicle_interface package for differential drive vehicle","text":"

    An Ackermann command in Autoware consists of two main control inputs:

    Conversely, a typical differential drive command consists of the following inputs:

    So, one way in which an Ackermann command can be converted to a differential drive command is by using the following equations:

    \\[ v_l = v - \\frac{l\\omega}{2}, v_r = v + \\frac{l\\omega}{2} \\]

    where \\(l\\) denotes wheel tread.

    For information about other factors that need to be considered when creating a vehicle_interface package, refer to the vehicle_interface component page.

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#22-set-an-appropriate-wheel_base","title":"2.2 Set an appropriate wheel_base","text":"

    A differential drive robot does not necessarily have front and rear wheels, which means that the wheelbase (the horizontal distance between the axles of the front and rear wheels) cannot be defined. However, Autoware expects wheel_base to be set in vehicle_info.param.yaml with some value. Thus, you need to set a pseudo value for wheel_base.

    The appropriate pseudo value for wheel_base depends on the size of your vehicle. Setting it to be the same value as wheel_tread is one possible choice.

    Warning

    "},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#3-known-issues","title":"3. Known issues","text":""},{"location":"how-to-guides/integrating-autoware/creating-vehicle-interface-package/customizing-for-differential-drive-model/#motion-model-incompatibility","title":"Motion model incompatibility","text":"

    Since Autoware assumes that vehicles use a steering system, it is not possible to take advantage of the flexibility of a differential drive system's motion model.

    For example, when planning a parking maneuver with the freespace_planner module, Autoware may drive the differential drive vehicle forward and backward, even if the vehicle can be parked with a simpler trajectory that uses pure rotational movement.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/","title":"Creating Autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#creating-autoware-meta-repository","title":"Creating Autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#what-is-meta-repository","title":"What is Meta-repository?","text":"

    A meta-repository is a repository that manages multiple repositories, and Autoware is one of them. It serves as a centralized control point for referencing, configuring, and versioning other repositories.

    By using Ansible and VCS, you can automatically set up your Autoware. autoware.repos file manages the configuration of multiple repositories.

    Note: VCS stands for Version Control System, such as Git or Subversion.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#how-to-create-and-customize-your-autoware-meta-repository","title":"How to create and customize your autoware meta-repository","text":""},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#1-create-autoware-repository","title":"1. Create autoware repository","text":"

    If you want to integrate Autoware into your vehicle, the first step is to create an Autoware meta-repository.

    One easy way is to fork autowarefoundation/autoware and clone it. For how to fork a repository, refer to GitHub Docs.

    git clone https://github.com/YOUR_NAME/autoware.git\n

    If you set up multiple types of vehicles, adding a suffix like autoware.vehicle_A or autoware.vehicle_B is recommended.

    "},{"location":"how-to-guides/integrating-autoware/creating-your-autoware-meta-repository/creating-autoware-meta-repository/#2-customize-your-autowarerepos-for-your-environment","title":"2. Customize your autoware.repos for your environment","text":"

    You need to customize autoware.repos for your own vehicle's Autoware.

    For example, if you want to customize the parameters in your individual_params or autoware_launch package to fit your vehicle, you can modify the configuration of each package and use them accordingly.

    Please edit the parameters in Autoware's autoware_individual_params and autoware_launch packages to match your vehicle's specific requirements, as these packages provide sample parameters and may not be tailored to your vehicle by default.

    If you want to fork autoware_individual_params and make modifications, it would be as follows:

    Example: If you fork autoware_individual_params and rename it to autoware_individual_params.vehicle_A:

    - param/autoware_individual_params:\n-   type: git\n-   url: https://github.com/autowarefoundation/autoware_individual_params\n-   version: main\n+ param/autoware_individual_params.vehicle_A:\n+   type: git\n+   url: https://github.com/YOUR_NAME/autoware_individual_params.vehicle_A\n+   version: main\n

    Please refer to the following documentation link for instructions on how to create and customize each vehicle_interface:

    Please remember to add all your custom packages, such as interfaces and descriptions, to your autoware.repos to ensure that your packages are properly included and managed within the Autoware repository.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/","title":"Launch Autoware","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/#launch-autoware","title":"Launch Autoware","text":"

    Warning

    Under Construction

    This section explains how to run your vehicle with Autoware.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#install-autoware","title":"Install Autoware","text":"

    Follow the installation steps of Autoware.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#launch-autoware_1","title":"Launch Autoware","text":"

    Launch Autoware with the following command:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP\n

    It is possible to specify which components to launch using command-line arguments. For example, if you don't need to launch perception, planning, and control for localization debug, you can launch the following:

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP \\\nlaunch_perception:=false \\\nlaunch_planning:=false \\\nlaunch_control:=false\n

    The basic command-line options are documented in autoware.launch.xml.

    There are options available to switch between different methods for some components. For example, by specifying pose_source/twist_source or perception_mode, you can switch localization and perception methods, respectively. These options allow you to choose the desired algorithms or sensor configurations for the respective functionalities.

    For options on eagleye component, please refer to the sub-pages.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#set-initial-pose","title":"Set initial pose","text":"

    If GNSS is available, Autoware automatically initializes the vehicle's pose.

    If not or if the automatic initialization returns an incorrect position, you need to set the initial pose using the RViz GUI.

    1. Click the 2D Pose estimate button in the toolbar, or hit the P key

    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the initial pose.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#set-goal-pose","title":"Set goal pose","text":"

    Set a goal pose for the ego vehicle.

    1. Click the 2D Nav Goal button in the toolbar, or hit the G key

    2. In the 3D View pane, click and hold the left mouse button, and then drag to set the direction for the goal pose. If successful, you will see the calculated planning path on RViz.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/#engage","title":"Engage","text":"

    In your terminal, execute the following command.

    source ~/autoware.YOURS/install/setup.bash\nros2 topic pub /autoware.YOURS/engage autoware_auto_vehicle_msgs/msg/Engage \"engage: true\" -1\n

    You can also engage via RViz with \"AutowareStatePanel\". The panel can be found in Panels > Add New Panel > tier4_state_rviz_plugin > AutowareStatePanel.

    Once the route is computed, the \"AUTO\" button becomes active. Pressing the AUTO button engages the autonomous driving mode.

    Now the vehicle should drive along the calculated path!

    During the autonomous driving, the StatePanel appears as shown in the image below. Pressing the \"STOP\" button allows you to stop the vehicle.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/","title":"Perception mode","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#perception-mode","title":"Perception mode","text":"

    Warning

    Under Construction

    By specifying the perception_mode, users can switch between different sensor configurations for perception. This allows you to choose the specific sensor setup that you want to use for the perception tasks.

    ros2 launch autoware_launch autoware.launch.xml vehicle_model:=YOUR_VEHICLE sensor_model:=YOUR_SENSOR_KIT map_path:=/PATH/TO/YOUR/MAP \\\nperception_mode:=lidar\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#lidar","title":"LiDAR","text":"

    perception_mode:=lidar

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#radar","title":"Radar","text":"

    perception_mode:=radar

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#camera-lidar-fusion","title":"Camera LiDAR fusion","text":"

    perception_mode:=camera_lidar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#camera-lidar-radar-fusion","title":"Camera LiDAR Radar fusion","text":"

    perception_mode:=camera_lidar_radar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/perception/#lidar-radar-fusion","title":"LiDAR Radar fusion","text":"

    perception_mode:=lidar_radar_fusion

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/","title":"Localization methods","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#localization-methods","title":"Localization methods","text":"

    Current localization launcher implemented by TIER IV supports multiple localization methods, both pose estimators and twist estimators. tier4_localization_component.launch.xml has two arguments to select which estimators to launch:

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#ndt-scan-matcher-a-lidar-and-pointcloud-map-based-pose-estimator-default","title":"NDT scan matcher: a LiDAR and pointcloud map based pose estimator (default)","text":"

    By default, Autoware launches ndt_scan_matcher for pose estimator. In order to launch this explicitly, you need to specify as follows:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=ndt ...\n

    Note that currently pose_source is set to NDT as default, so you can skip this argument.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#gyro-odometer-an-imu-wheel-odometry-based-twist-estimator-default","title":"Gyro Odometer: an IMU & wheel odometry based twist estimator (default)","text":"

    By default, Autoware launches gyro_odometer for twist estimator. In order to launch this explicitly, you need to specify as follows:

    ros2 launch autoware_launch autoware.launch.xml ... twist_source:=gyro_odom ...\n

    Note that currently twist_source is set to Gyro Odometer as default, so you can skip this argument.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#yabloc-a-camera-and-vector-map-based-pose-estimator","title":"YabLoc: a camera and vector map based pose estimator","text":"

    You can use YabLoc as a camera-based localization method. For more details on YabLoc, please refer to the README of YabLoc in autoware.universe.

    To use YabLoc as a pose_estimator, add pose_source:=yabloc when launching Autoware. By default, the pose_source is set to ndt. By specifying this command-line argument, YabLoc nodes will be automatically launched while the NDT nodes will not be started.

    Here is an example of a launch command:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=yabloc ...\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/#eagleye-a-gnss-imu-wheel-odometry-based-pose-and-twist-estimator","title":"Eagleye: a GNSS & IMU & wheel odometry based pose and twist estimator","text":"

    You can use Eagleye as a GNSS & IMU & wheel odometry-based localization method. For more details on Eagleye, please refer to the Eagleye.

    Eagleye has a function for position estimation and twist estimation, namely pose_estimator and twist_estimator, respectively. When running Eagleye in twist_estimator mode alongside another pose_estimator such as ndt_scan_matcher, Eagleye is still helpful, since it can improve scan matching by providing accurate twists using GNSS Doppler.

    You can use Eagleye by specifying the pose_source and twist_source accordingly through command-line arguments.

    Example of using Eagleye as the pose twist estimator:

    ros2 launch autoware_launch autoware.launch.xml ... pose_source:=eagleye twist_source:=eagleye ...\n

    Example of using Eagleye as the twist estimator:

    ros2 launch autoware_launch autoware.launch.xml ... twist_source:=eagleye ...\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/","title":"Eagleye","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#using-eagleye-with-autoware","title":"Using Eagleye with Autoware","text":"

    This page will show you how to set up Eagleye in order to use it with Autoware. For the details of the integration proposal, please refer to this discussion.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#what-is-eagleye","title":"What is Eagleye?","text":"

    Eagleye is an open-source GNSS/IMU-based localizer initially developed by MAP IV, Inc. It provides a cost-effective alternative to LiDAR and point cloud-based localization by using low-cost GNSS and IMU sensors to provide vehicle position, orientation, and altitude information.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#dependencies","title":"Dependencies","text":"

    The below packages are automatically installed during the setup of Autoware as they are listed in autoware.repos.

    1. Eagleye (autoware-main branch)
    2. RTKLIB ROS Bridge (ros2-v0.1.0 branch)
    3. LLH Converter (ros2 branch)
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#architecture","title":"Architecture","text":"

    Eagleye can be utilized in the Autoware localization stack in two ways:

    1. Feed only twist into the EKF localizer.

    2. Feed both twist and pose from Eagleye into the EKF localizer (twist can also be used with regular gyro_odometry).

    Note: RTK positioning is required when using Eagleye as the pose estimator. On the other hand, it is not mandatory when using it as the twist estimator.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#requirements","title":"Requirements","text":"

    Eagleye requires GNSS, IMU and vehicle speed as inputs.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#imu-topic","title":"IMU topic","text":"

    sensor_msgs/msg/Imu is supported for Eagleye IMU input.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#vehicle-speed-topic","title":"Vehicle speed topic","text":"

    geometry_msgs/msg/TwistStamped and geometry_msgs/msg/TwistWithCovarianceStamped are supported for the input vehicle speed.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#gnss-topic","title":"GNSS topic","text":"

    Eagleye requires latitude/longitude height and doppler velocity generated by the GNSS receiver. Your GNSS ROS driver must publish the following messages:

    GNSS ROS drivers modification ublox_gps No additional settings are required. It publishes sensor_msgs/msg/NavSatFix and geometry_msgs/msg/TwistWithCovarianceStamped required by Eagleye with default settings. septentrio_gnss_driver Set publish.navsatfix and publish.twist in the config file gnss.yaml to true"},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#parameter-modifications-for-integration-into-your-vehicle","title":"Parameter Modifications for Integration into Your Vehicle","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#topic-name-topic-type","title":"topic name & topic type","text":"

    The users must correctly specify the input topics for GNSS latitude, longitude, and height, GNSS doppler speed, IMU, and vehicle speed in the eagleye_config.yaml.

    # Topic\ntwist:\ntwist_type: 1 # TwistStamped : 0, TwistWithCovarianceStamped: 1\ntwist_topic: /sensing/vehicle_velocity_converter/twist_with_covariance\nimu_topic: /sensing/imu/tamagawa/imu_raw\ngnss:\nvelocity_source_type: 2 # rtklib_msgs/RtklibNav: 0, nmea_msgs/Sentence: 1, ublox_msgs/NavPVT: 2, geometry_msgs/TwistWithCovarianceStamped: 3\nvelocity_source_topic: /sensing/gnss/ublox/navpvt\nllh_source_type: 2 # rtklib_msgs/RtklibNav: 0, nmea_msgs/Sentence: 1, sensor_msgs/NavSatFix: 2\nllh_source_topic: /sensing/gnss/ublox/nav_sat_fix\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#sensor-frequency","title":"sensor frequency","text":"

    Also, the frequency of GNSS and IMU must be set in eagleye_config.yaml

    common:\nimu_rate: 50\ngnss_rate: 5\n
    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#conversion-from-fix-to-pose","title":"Conversion from fix to pose","text":"

    The parameters for converting sensor_msgs/msg/NavSatFix to geometry_msgs/msg/PoseWithCovarianceStamped is listed in fix2pose.yaml. If you use a different geoid or projection type, change these parameters.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#other-parameters","title":"Other parameters","text":"

    The other parameters are described here. Basically, these do not need to be changed.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#notes-on-initialization","title":"Notes on initialization","text":"

    Eagleye requires an initialization process for proper operation. Without initialization, the output for twist will be in the raw value, and the pose data will not be available.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#1-static-initialization","title":"1. Static Initialization","text":"

    The first step is static initialization, which involves keeping the vehicle stationary for approximately 5 seconds after startup so that Eagleye can estimate the yaw-rate offset.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#2-dynamic-initialization","title":"2. Dynamic initialization","text":"

    The next step is dynamic initialization, which involves driving the vehicle in a straight line for approximately 30 seconds. This process estimates the wheel speed scale factor and the azimuth angle.

    Once dynamic initialization is complete, Eagleye will be able to provide corrected twist and pose data.

    "},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#how-to-check-the-progress-of-initialization","title":"How to check the progress of initialization","text":""},{"location":"how-to-guides/integrating-autoware/launch-autoware/localization-methods/eagleye-guide/#note-on-georeferenced-maps","title":"Note on georeferenced maps","text":"

    Note that the output position might not appear to be in the point cloud maps if you are using maps that are not properly georeferenced. In the case of a single GNSS antenna, initial position estimation (dynamic initialization) can take several seconds to complete after starting to run in an environment where GNSS positioning is available.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/","title":"Evaluating the controller performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#evaluating-the-controller-performance","title":"Evaluating the controller performance","text":"

    This page shows how to use control_performance_analysis package to evaluate the controllers.

    control_performance_analysis is the package to analyze the tracking performance of a control module and monitor the driving status of the vehicle.

    If you need more detailed information about package, refer to the control_performance_analysis.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#how-to-use","title":"How to use","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#before-driving","title":"Before Driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#1-firstly-you-need-to-launch-autoware-you-can-also-use-this-tool-with-real-vehicle-driving","title":"1. Firstly you need to launch Autoware. You can also use this tool with real vehicle driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#2-initialize-the-vehicle-and-send-goal-position-to-create-route","title":"2. Initialize the vehicle and send goal position to create route","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#3-launch-the-control_performance_analysis-package","title":"3. Launch the control_performance_analysis package","text":"
    ros2 launch control_performance_analysis controller_performance_analysis.launch.xml\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#4-run-the-plotjuggler-in-sourced-terminal","title":"4. Run the PlotJuggler in sourced terminal","text":"
    source ~/autoware/install/setup.bash\n
    ros2 run plotjuggler plotjuggler\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#5-increase-the-buffer-size-maximum-is-100-and-import-the-layout-from-autowareuniversecontrolcontrol_performance_analysisconfigcontroller_monitorxml","title":"5. Increase the buffer size (maximum is 100), and import the layout from /autoware.universe/control/control_performance_analysis/config/controller_monitor.xml","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#6-now-you-can-start-to-driving-you-should-see-all-the-performance-and-driving-variables-in-plotjuggler","title":"6. Now, you can start to driving. You should see all the performance and driving variables in PlotJuggler","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#after-driving","title":"After Driving","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#1-you-can-export-the-statistical-output-and-all-data-to-compare-and-later-usage","title":"1. You can export the statistical output and all data to compare and later usage","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-controller-performance/#tips","title":"Tips","text":" "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/","title":"Evaluating real-time performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#evaluating-real-time-performance","title":"Evaluating real-time performance","text":""},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#introduction","title":"Introduction","text":"

    Autoware should be a real-time system when integrated into a service. Therefore, the response time of each callback should be as small as possible. If Autoware appears to be slow, it is imperative to conduct performance measurements and implement improvements based on the analysis. However, Autoware is a complex software system comprising numerous ROS 2 nodes, potentially complicating the process of identifying bottlenecks. To address this challenge, we will discuss methods for conducting detailed performance measurements for Autoware and provide case studies. It is worth noting that multiple factors can contribute to poor performance, such as scheduling and memory allocation in the OS layer, but our focus on this page will be on user code bottlenecks. The outline of this section is as follows:

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#performance-measurement","title":"Performance measurement","text":"

    Improvement is impossible without precise measurements. To measure the performance of the application code, it is essential to eliminate any external influences. Such influences include interference from the operating system and CPU frequency fluctuations. Scheduling effects also occur when core resources are shared by multiple threads. This section outlines a technique for accurately measuring the performance of the application code for a specific node. Though this section only discusses the case of Linux on Intel CPUs, similar considerations should be made in other environments.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#single-node-execution","title":"Single node execution","text":"

    To eliminate the influence of scheduling, the node being measured should operate independently, using the same logic as when the entire Autoware system is running. To accomplish this, record all input topics of the node to be measured while the whole Autoware system is running. To achieve this objective, a tool called ros2_single_node_replayer has been prepared.

    Details on how to use the tool can be found in the README. This tool records the input topics of a specific node during the entire Autoware operation and replays it in a single node with the same logic. The tool relies on the ros2 bag record command, and the recording of service/action is not supported as of ROS 2 Humble, so nodes that use service/action as their main logic may not work well.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#prepare-separated-cores","title":"Prepare separated cores","text":"

    Isolated cores running the node to be measured must meet the following conditions.

    To fulfill these conditions on Linux, a custom kernel build with the following kernel configurations is required. You can find many resources to instruct you on how to build a custom Linux kernel (like this one). Note that even if full tickless is enabled, timer interrupts are generated for scheduling if more than two tasks exist in one core.

    # Enable CONFIG_NO_HZ_FULL\n-> General setup\n-> Timers subsystem\n-> Timer tick handling (Full dynticks system (tickless))\n(X) Full dynticks system (tickless)\n\n# Allows RCU callback processing to be offloaded from selected CPUs\n# (CONFIG_RCU_NOCB_CPU=y)\n-> General setup\n-> RCU Subsystem\n-*- Offload RCU callback processing from boot-selected CPUs\n

    Additionally, the kernel boot parameters need to be set as follows.

    GRUB_CMDLINE_LINUX_DEFAULT=\n  \"... isolcpus=2,8 rcu_nocbs=2,8 rcu_nocb_poll nohz_full=2,8 intel_pstate=disable\"\n

    In the above configuration, for example, the node to be measured is assumed to run on core 2, and core 8, which is a hyper-threading pair, is also being isolated. Appropriate decisions on which cores to run the measurement target and which nodes to isolate need to be made based on the cache and core layout of the measurement machine. You can easily check if it is properly configured by running cat /proc/softirqs. Since intel_pstate=disable is specified in the kernel boot parameter, userspace can be specified in the scaling governor.

    cat /sys/devices/system/cpu/cpu2/cpufreq/scaling_governor // ondemand\nsudo sh -c \"echo userspace > /sys/devices/system/cpu/cpu2/cpufreq/scaling_governor\"\n

    This allows you to freely set the desired frequency within a defined range.

    sudo sh -c \"echo <freq(khz)> > /sys/devices/system/cpu/cpu2/cpufreq/scaling_setspeed\"\n

    Turbo Boost needs to be switched off on Intel CPUs, which is often overlooked.

    sudo sh -c \"echo 0 > /sys/devices/system/cpu/cpufreq/boost\"\n
    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#run-single-node-separately","title":"Run single node separately","text":"

    Following the instructions in the ros2_single_node_replayer README, start the node and play the dedicated rosbag created by the tool. Before playing the rosbag, appropriately set the CPU affinity of the thread on which the node runs, so it is placed on the isolated core prepared.

    taskset --cpu-list -p <target cpu> <pid>\n

    To avoid interference in the last level cache, minimize the number of other applications running during the measurement.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#measurement-and-visualization","title":"Measurement and visualization","text":"

    To visualize the performance of the measurement target, embed code for logging timestamps and performance counter values in the target source code. To achieve this objective, a tool called pmu_analyzer has been prepared.

    Details on how to use the tool can be found in the README. This tool can measure the turnaround time of any section in the source code, as well as various performance counters.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#case-studies","title":"Case studies","text":"

    In this section, we will present several case studies that demonstrate the performance improvements. These examples not only showcase our commitment to enhancing the system's efficiency but also serve as a valuable resource for developers who may face similar challenges in their own projects. The performance improvements discussed here span various components of the Autoware system, including sensing modules and planning modules. There are tendencies for each component regarding which points are becoming bottlenecks. By examining the methods, techniques, and tools employed in these case studies, readers can gain a better understanding of the practical aspects of optimizing complex software systems like Autoware.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#sensing-component","title":"Sensing component","text":"

    First, we will explain the procedure for performance improvement, taking the node ring_outlier_filter as an example. Refer to the Pull Request for details.

    The following figure is a time-series plot of the turnaround time of the main processing part of ring_outlier_filter, analyzed as described in the \"Performance Measurement\" section above.

    The horizontal axis indicates the number of callbacks called (i.e., callback index), and the vertical axis indicates the turnaround time.

    When analyzing the performance of the sensing module from the viewpoint of performance counter, pay attention to instructions, LLC-load-misses, LLC-store-misses, cache-misses, and minor-faults.

    Analysis of the performance counter shows that the largest fluctuations come from minor-faults (i.e., soft page faults), the second largest from LLC-store-misses and LLC-load-misses (i.e., cache misses in the last level cache), and the slowest fluctuations come from instructions (i.e., message data size fluctuations). For example, when we plot minor-faults on the horizontal axis and turnaround time on the vertical axis, we can see the following dominant proportional relationship.

    To achieve zero soft page faults, heap allocations must only be made from areas that have been first touched in advance. We have developed a library called heaphook to avoid soft page faults while running Autoware callback. If you are interested, refer to the GitHub discussion and the issue.

    To reduce LLC misses, it is necessary to reduce the working set and to use cache-efficient access patterns.

    In the sensing component, which handles large message data such as LiDAR point cloud data, minimizing copying is important. A callback that takes sensor data message types as input and output should be written in an in-place algorithm as much as possible. This means that in the following pseudocode, when generating output_msg from input_msg, it is crucial to avoid using buffers as much as possible to reduce the number of memory copies.

    void callback(const PointCloudMsg &input_msg) {\nauto output_msg = allocate_msg<PointCloudMsg>(output_size);\nfill(input_msg, output_msg);\npublish(std::move(output_msg));\n}\n

    To improve cache efficiency, implement an in-place style as much as possible, instead of touching memory areas sporadically. In ROS applications using PCL, the code shown below is often seen.

    void callback(const sensor_msgs::PointCloud2ConstPtr &input_msg) {\npcl::PointCloud<PointT>::Ptr input_pcl(new pcl::PointCloud<PointT>);\npcl::fromROSMsg(*input_msg, *input_pcl);\n\n// Algorithm is described for point cloud type of pcl\npcl::PointCloud<PointT>::Ptr output_pcl(new pcl::PointCloud<PointT>);\nfill_pcl(*input_pcl, *output_pcl);\n\nauto output_msg = allocate_msg<sensor_msgs::PointCloud2>(output_size);\npcl::toROSMsg(*output_pcl, *output_msg);\npublish(std::move(output_msg));\n}\n

    To use the PCL library, fromROSMsg() and toROSMsg() are used to perform message type conversion at the beginning and end of the callback. This is a wasteful copying process and should be avoided. We should eliminate unnecessary type conversions by removing dependencies on PCL (e.g., https://github.com/tier4/velodyne_vls/pull/39). For large message types such as map data, there should be only one instance in the entire system in terms of physical memory.

    "},{"location":"how-to-guides/integrating-autoware/tuning-parameters-and-performance/evaluating-real-time-performance/#planning-component","title":"Planning component","text":"

    First, we will pick up detection_area module in behavior_velocity_planner node, which tends to have long turnaround time. We have followed the performance analysis steps above to obtain the following graph. Axes are the same as the graphs in the sensing case study.

    Using pmu_analyzer tool to further identify the bottleneck, we have found that the following multiple loops were taking up a lot of processing time:

    for ( area : detection_areas )\nfor ( point : point_clouds )\nif ( boost::geometry::within(point, area) )\n// do something with O(1)\n

    It checks whether each point cloud is contained in each detection area. Let N be the size of point_clouds and M be the size of detection_areas, then the computational complexity of this program is O(N^2 * M), since the complexity of within is O(N). Here, given that most of the point clouds are located far away from a certain detection area, a certain optimization can be achieved. First, calculate the minimum enclosing circle that completely covers the detection area, and then check whether the points are contained in that circle. Most of the point clouds can be quickly ruled out by this method, we don\u2019t have to call the within function in most cases. Below is the pseudocode after optimization.

    for ( area : detection_areas )\ncircle = calc_minimum_enclosing_circle(area)\nfor ( point : point_clouds )\nif ( point is in circle )\nif ( boost::geometry::within(point, area) )\n// do something with O(1)\n

    By using O(N) algorithm for minimum enclosing circle, the computational complexity of this program is reduced to almost O(N * (N + M)) (note that the exact computational complexity does not really change). If you are interested, refer to the Pull Request.

    Similar to this example, in the planning component, we take into consideration thousands to tens of thousands of point clouds, thousands of points in a path representing our own route, and polygons representing obstacles and detection areas in the surroundings, and we repeatedly create paths based on them. Therefore, we access the contents of the point clouds and paths multiple times using for-loops. In most cases, the bottleneck lies in these naive for-loops. Here, understanding Big O notation and reducing the order of computational complexity directly leads to performance improvements.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/","title":"Add a custom ROS message","text":""},{"location":"how-to-guides/others/add-a-custom-ros-message/#add-a-custom-ros-message","title":"Add a custom ROS message","text":""},{"location":"how-to-guides/others/add-a-custom-ros-message/#overview","title":"Overview","text":"

    During the Autoware development, you will probably need to define your own messages. Read the following instructions before adding a custom message.

    1. Messages in autoware_msgs define the interfaces of Autoware Core.

      • If a contributor wishes to make changes or add new messages to autoware_msgs, they should first create a new discussion post under the Design category.
    2. Any other minor or proposal messages used for internal communication within a component (such as planning) should be defined in another repository.

      • tier4_autoware_msgs is an example of that.

    The following is a simple tutorial of adding a message package to autoware_msgs. For the general ROS 2 tutorial, see Create custom msg and srv files.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/#how-to-create-custom-message","title":"How to create custom message","text":"

    Make sure you are in the Autoware workspace, and then run the following command to create a new package. As an example, let's create a package to define sensor messages.

    1. Create a package

      cd ./src/core/autoware_msgs\nros2 pkg create --build-type ament_cmake autoware_sensing_msgs\n
    2. Create custom messages

      You should create .msg files and place them in the msg directory.

      NOTE: The initial letters of the .msg and .srv files must be capitalized.

      As an example, let's make .msg files GnssInsOrientation.msg and GnssInsOrientationStamped.msg to define GNSS/INS orientation messages:

      mkdir msg\ncd msg\ntouch GnssInsOrientation.msg\ntouch GnssInsOrientationStamped.msg\n

      Edit GnssInsOrientation.msg with your editor to be the following content:

      geometry_msgs/Quaternion orientation\nfloat32 rmse_rotation_x\nfloat32 rmse_rotation_y\nfloat32 rmse_rotation_z\n

      In this case, the custom message uses a message from another message package geometry_msgs/Quaternion.

      Edit GnssInsOrientationStamped.msg with your editor to be the following content:

      std_msgs/Header header\nGnssInsOrientation orientation\n

      In this case, the custom message uses a message from another message package std_msgs/Header.

    3. Edit CMakeLists.txt

      In order to use this custom message in C++ or Python languages, we need to add the following lines to CMakeLists.txt:

      rosidl_generate_interfaces(${PROJECT_NAME}\n\"msg/GnssInsOrientation.msg\"\n\"msg/GnssInsOrientationStamped.msg\"\nDEPENDENCIES\ngeometry_msgs\nstd_msgs\nADD_LINTER_TESTS\n)\n

      The ament_cmake_auto tool is very useful and is more widely used in Autoware, so we recommend using ament_cmake_auto instead of ament_cmake.

      We need to replace

      find_package(ament_cmake REQUIRED)\n\nament_package()\n

      with

      find_package(ament_cmake_auto REQUIRED)\n\nament_auto_package()\n
    4. Edit package.xml

      We need to declare relevant dependencies in package.xml. For the above example we need to add the following content:

      <buildtool_depend>rosidl_default_generators</buildtool_depend>\n\n<exec_depend>rosidl_default_runtime</exec_depend>\n\n<depend>geometry_msgs</depend>\n<depend>std_msgs</depend>\n\n<member_of_group>rosidl_interface_packages</member_of_group>\n

      We need to replace <buildtool_depend>ament_cmake</buildtool_depend> with <buildtool_depend>ament_cmake_auto</buildtool_depend> in the package.xml file.

    5. Build the custom message package

      You can build the package in the root of your workspace, for example by running the following command:

      colcon build --packages-select autoware_sensing_msgs\n

      Now the GnssInsOrientationStamped message will be discoverable by other packages in Autoware.

    "},{"location":"how-to-guides/others/add-a-custom-ros-message/#how-to-use-custom-messages-in-autoware","title":"How to use custom messages in Autoware","text":"

    You can use the custom messages in Autoware by following these steps:

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/","title":"Advanced usage of colcon","text":""},{"location":"how-to-guides/others/advanced-usage-of-colcon/#advanced-usage-of-colcon","title":"Advanced usage of colcon","text":"

    This page shows some advanced and useful usage of colcon. If you need more detailed information, refer to the colcon documentation.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#common-mistakes","title":"Common mistakes","text":""},{"location":"how-to-guides/others/advanced-usage-of-colcon/#do-not-run-from-other-than-the-workspace-root","title":"Do not run from other than the workspace root","text":"

    It is important that you always run colcon build from the workspace root because colcon builds only under the current directory. If you have mistakenly built in a wrong directory, run rm -rf build/ install/ log/ to clean the generated files.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#do-not-unnecessarily-overlay-workspaces","title":"Do not unnecessarily overlay workspaces","text":"

    colcon overlays workspaces if you have sourced the setup.bash of other workspaces before building a workspace. You should take care of this especially when you have multiple workspaces.

    Run echo $COLCON_PREFIX_PATH to check whether workspaces are overlaid. If you find some workspaces are unnecessarily overlaid, remove all built files, restart the terminal to clean environment variables, and re-build the workspace.

    For more details about workspace overlaying, refer to the ROS 2 documentation.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#cleaning-up-the-build-artifacts","title":"Cleaning up the build artifacts","text":"

    colcon sometimes causes errors because of the old cache. To remove the cache and rebuild the workspace, run the following command:

    rm -rf build/ install/\n

    In case you know what packages to remove:

    rm -rf {build,install}/{package_a,package_b}\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#selecting-packages-to-build","title":"Selecting packages to build","text":"

    To just build specified packages:

    colcon build --packages-select <package_name1> <package_name2> ...\n

    To build specified packages and their dependencies recursively:

    colcon build --packages-up-to <package_name1> <package_name2> ...\n

    You can also use these options for colcon test.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#changing-the-optimization-level","title":"Changing the optimization level","text":"

    Set DCMAKE_BUILD_TYPE to change the optimization level.

    Warning

    If you specify DCMAKE_BUILD_TYPE=Debug or no DCMAKE_BUILD_TYPE is given for building the entire Autoware, it may be too slow to use.

    colcon build --cmake-args -DCMAKE_BUILD_TYPE=Debug\n
    colcon build --cmake-args -DCMAKE_BUILD_TYPE=RelWithDebInfo\n
    colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#changing-the-default-configuration-of-colcon","title":"Changing the default configuration of colcon","text":"

    Create $COLCON_HOME/defaults.yaml to change the default configuration.

    mkdir -p ~/.colcon\ncat << EOS > ~/.colcon/defaults.yaml\n{\n\"build\": {\n\"symlink-install\": true\n}\n}\n

    For more details, see here.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#generating-compile_commandsjson","title":"Generating compile_commands.json","text":"

    compile_commands.json is used by IDEs/tools to analyze the build dependencies and symbol relationships.

    You can generate it with the flag DCMAKE_EXPORT_COMPILE_COMMANDS=1:

    colcon build --cmake-args -DCMAKE_EXPORT_COMPILE_COMMANDS=1\n
    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#seeing-compiler-commands","title":"Seeing compiler commands","text":"

    To see the compiler and linker invocations for a package, use VERBOSE=1 and --event-handlers console_cohesion+:

    VERBOSE=1 colcon build --packages-up-to <package_name> --event-handlers console_cohesion+\n

    For other options, see here.

    "},{"location":"how-to-guides/others/advanced-usage-of-colcon/#using-ccache","title":"Using Ccache","text":"

    Ccache can speed up recompilation. It is recommended to use it to save your time unless you have a specific reason not to do so.

    1. Install Ccache:

      sudo apt update && sudo apt install ccache\n
    2. Write the following in your .bashrc:

      export CC=\"/usr/lib/ccache/gcc\"\nexport CXX=\"/usr/lib/ccache/g++\"\n
    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/","title":"An example procedure for adding and evaluating a new node","text":""},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#an-example-procedure-for-adding-and-evaluating-a-new-node","title":"An example procedure for adding and evaluating a new node","text":""},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#overview","title":"Overview","text":"

    This page provides a guide for evaluating Autoware when a new node is implemented, especially about developing a novel localization node.

    The workflow involves initial testing and rosbag recording using a real vehicle or AWSIM, implementing the new node, subsequent testing using the recorded rosbag, and finally evaluating with a real vehicle or AWSIM.

    It is assumed that the method intended for addition has already been verified well with public datasets and so on.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#1-running-autoware-in-its-standard-configuration","title":"1. Running Autoware in its standard configuration","text":"

    First of all, it is important to be able to run the standard Autoware to establish a basis for performance and behavior comparison.

    Autoware constantly incorporates new features. It is crucial to initially confirm that it operates as expected with the current version, which helps in problem troubleshooting.

    In this context, AWSIM is presumed. Therefore, AWSIM simulator can be useful. If you are using actual hardware, please refer to the How-to guides.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#2-recording-a-rosbag-using-autoware","title":"2. Recording a rosbag using Autoware","text":"

    Before developing a new node, it is recommended to record a rosbag in order to evaluate. If you need a new sensor, you should add it to your vehicle or AWSIM.

    In this case, it is recommended to save all topics regardless of whether they are necessary or not. For example, in Localization, since the initial position estimation service is triggered by the input to rviz and the GNSS topic, the initial position estimation does not start when playing back data unless those topics are saved.

    Consider the use of the mcap format if data capacity becomes a concern.

    It is worth noting that using ros2 bag record increases computational load and might affect performance. After data recording, verifying the smooth flow of sensor data and unchanged time series is advised. This verification can be accomplished, for example, by inspecting the image data with rqt_image_view during ros2 bag play.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#3-developing-the-new-node","title":"3. Developing the new node","text":"

    When developing a new node, it could be beneficial to reference a package that is similar to the one you intend to create.

    It is advisable to thoroughly read the Design page, contemplate the addition or replacement of nodes in Autoware, and then implement your solution.

    For example, a node doing NDT, a LiDAR-based localization method, is ndt_scan_matcher. If you want to replace this with a different approach, implement a node which produces the same topics and provides the same services.

    ndt_scan_matcher is launched as pose_estimator, so it is necessary to replace the launch file as well.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#4-evaluating-by-a-rosbag-based-simulator","title":"4. Evaluating by a rosbag-based simulator","text":"

    Once the new node is implemented, it is time to evaluate it. logging_simulator is a tool of how to evaluate the new node using the rosbag captured in step 2.

    When you run the logging_simulator, you can set planning:=false or control:=false to disable the launch of specific component nodes.

    ros2 launch autoware_launch logging_simulator.launch.xml ... planning:=false control:=false

    After launching logging_simulator, the rosbag file obtained in step 2 should be replayed using ros2 bag play <rosbag_file>.

    If you remap the topics related to the localization that you want to verify this time, Autoware will use the data it is calculating this time instead of the data it recorded. Also, using the --topics option of ros2 bag play, you can publish only specific topics in rosbag.

    There is ros2bag_extensions available to filter the rosbag file and create a new rosbag file that contains only the topics you need.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#5-evaluating-in-a-realtime-environment","title":"5. Evaluating in a realtime environment","text":"

    Once you have sufficiently verified the behavior in the logging_simulator, let's run it as Autoware with new nodes added in the realtime environment.

    To debug Autoware, the method described at debug-autoware is useful.

    For reproducibility, you may want to fix the GoalPose. In such cases, consider using the tier4_automatic_goal_rviz_plugin.

    "},{"location":"how-to-guides/others/an-example-procedure-for-adding-and-evaluating-a-new-node/#6-sharing-the-results","title":"6. Sharing the results","text":"

    If your implementation works successfully, please consider a pull request to Autoware.

    It is also a good idea to start by presenting your ideas in Discussion at Show and tell.

    For localization, YabLoc's Proposal may provide valuable insights.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/","title":"Applying Clang-Tidy to ROS packages","text":""},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#applying-clang-tidy-to-ros-packages","title":"Applying Clang-Tidy to ROS packages","text":"

    Clang-Tidy is a powerful C++ linter.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#preparation","title":"Preparation","text":"

    You need to generate build/compile_commands.json before using Clang-Tidy.

    colcon build --cmake-args -DCMAKE_EXPORT_COMPILE_COMMANDS=1\n
    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#usage","title":"Usage","text":"
    clang-tidy -p build/ path/to/file1 path/to/file2 ...\n

    If you want to apply Clang-Tidy to all files in a package, using the fd command is useful. To install fd, see the installation manual.

    clang-tidy -p build/ $(fd -e cpp -e hpp --full-path \"/autoware_utils/\")\n
    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#ide-integration","title":"IDE integration","text":""},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#clion","title":"CLion","text":"

    Refer to the CLion Documentation.

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#visual-studio-code","title":"Visual Studio Code","text":"

    Use either one of the following extensions:

    "},{"location":"how-to-guides/others/applying-clang-tidy-to-ros-packages/#troubleshooting","title":"Troubleshooting","text":"

    If you encounter clang-diagnostic-error, try installing libomp-dev.

    Related: https://github.com/autowarefoundation/autoware-github-actions/pull/172

    "},{"location":"how-to-guides/others/debug-autoware/","title":"Debug Autoware","text":""},{"location":"how-to-guides/others/debug-autoware/#debug-autoware","title":"Debug Autoware","text":"

    This page provides some methods for debugging Autoware.

    "},{"location":"how-to-guides/others/debug-autoware/#print-debug-messages","title":"Print debug messages","text":"

    The essential thing for debugging is to print program information clearly, which helps you quickly assess the program's operation and locate the problem. Autoware uses the ROS 2 logging tool to print debug messages; for how to design console logging, refer to the tutorial Console logging.

    "},{"location":"how-to-guides/others/debug-autoware/#using-ros-tools-debug-autoware","title":"Using ROS tools debug Autoware","text":""},{"location":"how-to-guides/others/debug-autoware/#using-command-line-tools","title":"Using command line tools","text":"

    ROS 2 includes a suite of command-line tools for introspecting a ROS 2 system. The main entry point for the tools is the command ros2, which itself has various sub-commands for introspecting and working with nodes, topics, services, and more. How to use the ROS 2 command line tool refer to tutorial CLI tools.

    "},{"location":"how-to-guides/others/debug-autoware/#using-rviz2","title":"Using rviz2","text":"

    Rviz2 is a port of Rviz to ROS 2. It provides a graphical interface for users to view their robot, sensor data, maps, and more. You can run Rviz2 tool easily by:

    rviz2\n

    When Autoware launches the simulators, the Rviz2 tool is opened by default to visualize the autopilot's graphical information.

    "},{"location":"how-to-guides/others/debug-autoware/#using-rqt-tools","title":"Using rqt tools","text":"

    RQt is a graphical user interface framework that implements various tools and interfaces in the form of plugins. You can run any RQt tools/plugins easily by:

    rqt\n

    This GUI allows you to choose any available plugins on your system. You can also run plugins in standalone windows. For example, RQt Console:

    ros2 run rqt_console rqt_console\n
    "},{"location":"how-to-guides/others/debug-autoware/#common-rqt-tools","title":"Common RQt tools","text":"
    1. rqt_graph: view node interaction

      In complex applications, it may be helpful to get a visual representation of the ROS node interactions.

      ros2 run rqt_graph rqt_graph\n
    2. rqt_console: view messages

      rqt_console is a great GUI for viewing ROS log messages.

      ros2 run rqt_console rqt_console\n
    3. rqt_plot: view data plots

      rqt_plot is an easy way to plot ROS data in real time.

      ros2 run rqt_plot rqt_plot\n
    "},{"location":"how-to-guides/others/debug-autoware/#using-ros2_graph","title":"Using ros2_graph","text":"

    ros2_graph can be used to generate mermaid description of ROS 2 graphs to add on your markdown files.

    It can also be used as a colorful alternative to rqt_graph even though it would require some tool to render the generated mermaid diagram.

    It can be installed with:

    pip install ros2-graph\n

    Then you can generate a mermaid description of the graph with:

    ros2_graph your_node\n\n# or like with an output file\nros2_graph /turtlesim -o turtle_diagram.md\n\n# or multiple nodes\nros2_graph /turtlesim /teleop_turtle\n

    You can then visualize these graphs with:

    "},{"location":"how-to-guides/others/debug-autoware/#using-ros2doctor","title":"Using ros2doctor","text":"

    When your ROS 2 setup is not running as expected, you can check its settings with the ros2doctor tool.

    ros2doctor checks all aspects of ROS 2, including platform, version, network, environment, running systems and more, and warns you about possible errors and reasons for issues.

    It's as simple as just running ros2 doctor in your terminal.

    It has the ability to list \"Subscribers without publishers\" for all topics in the system.

    And this information can help you find if a necessary node isn't running.

    For more details, see the following official documentation for Using ros2doctor to identify issues.

    "},{"location":"how-to-guides/others/debug-autoware/#using-a-debugger-with-breakpoints","title":"Using a debugger with breakpoints","text":"

    Many IDEs (e.g. Visual Studio Code, CLion) support debugging C/C++ executables with GDB on the Linux platform. The following lists some references for using the debugger:

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/","title":"Defining temporal performance metrics on components","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#defining-temporal-performance-metrics-on-components","title":"Defining temporal performance metrics on components","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#motivation-to-defining-temporal-performance-metrics","title":"Motivation to defining temporal performance metrics","text":""},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#objective-of-the-page","title":"Objective of the page","text":"

    This page introduces policies to define metrics to evaluate temporal performance on components of Autoware. The term \"temporal performance\" is often used throughout the page in order to distinguish between functional performance, which is also referred to as accuracy, and time-related performance.

    It is expected that most algorithms employed for Autoware are executed with as high frequency and short response time as possible. In order to achieve safe autonomous driving, one of the desired outcomes is no time gap between perceived and actual situation. The time gap is commonly referred to as delay. If the delay is significant, the system may determine trajectory and maneuver based on outdated situation. Consequently, if the actual situation differs from the perceived one due to the delay, the system may make unexpected decisions.

    As mentioned above, this page presents the policies to define metrics. Besides, the page contains lists of sample metrics that are crucial for the main functionalities of Autoware: Localization, Perception, Planning, and Control.

    Note

    Other functionalities, such as system components for diagnosis, are currently excluded. However, they will be taken into account in the near future.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#contribution-of-the-temporal-performance-metrics","title":"Contribution of the temporal performance metrics","text":"

    Temporal performance metrics are important for evaluating Autoware. These metrics are particularly useful for assessing delays caused by new algorithms and logic. They can be employed when comparing the temporal performance of software on a desktop computer with that on a vehicle during the vehicle integration phase.

    In addition, these metrics are useful for designers and evaluators of middleware, operating systems, and computers. They are selected based on user and product requirements. One of these requirements is to provide sufficient temporal performance for executing Autoware. \"Sufficient temporal performance\" is defined as a temporal performance requirement, but it can be challenging to define the requirement because it varies depending on the product type, Operational Design Domain (ODD), and other factors. Then, this page specifically focuses on temporal performance metrics rather than requirements.

    Temporal performance metrics are important for evaluating the reliability of Autoware. However, ensuring the reliability of Autoware requires consideration of not only temporal performance metrics but also other metrics.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#tools-for-evaluating-the-metrics","title":"Tools for evaluating the metrics","text":"

    There are several tools available for evaluating Autoware according to the metrics listed in the page. For example, both CARET and ros2_tracing are recommended options when evaluating Autoware on Linux and ROS 2. If you want to measure the metrics with either of these tools, refer to the corresponding user guide for instructions. It's important to note that if you import Autoware to a platform other than Linux and ROS 2, you will need to choose a supported tool for evaluation.

    Note

    TIER IV plans to measure Autoware, which is running according to the tutorial, and provide a performance evaluation report periodically. An example of such a report can be found here, although it may not include all of the metrics listed.

    The page does not aim to provide instructions on how to use these tools or measure the metrics. Its primary focus is on the metrics themselves, as they are more important than the specific tools used. These metrics retain their relevance regardless of the employed platform.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#policies-to-define-temporal-performance-metrics","title":"Policies to define temporal performance metrics","text":"

    As mentioned above, the configuration of Autoware varies by the product type, ODD, and other factors. The variety of configurations makes it difficult to define uniform metrics for evaluating Autoware. However, the policies used to define them are basically reused even when the configuration changes. Each temporal performance metric is categorized into one of two types: execution frequency and response time. Although there are many types of metrics, such as communication latency, only these two types are considered for simplicity. Execution frequency is observed using the rate of Inter-Process Communication (IPC) messages. You will find an enormous number of messages in Autoware, but you don't have to take care of all of them. Some messages might be critical to functionality and they should be chosen for evaluation. Response time is the duration elapsed through a series of processing. A series of processing is referred to as a path. Response time is calculated from the timestamps of the start and end of a path. Although many paths can be defined in Autoware, you have to choose significant paths.

    As a hint, here are some characteristics of message and path in order to choose metrics.

    1. Messages and paths on boundaries where observed values from sensors are consumed
    2. Messages and paths on boundaries of functions, e.g., a boundary of perception and planning
    3. Messages and paths on boundaries where timer-based frequency is switched
    4. Messages and paths on boundaries where two different messages are synchronized and merged
    5. Messages that must be transmitted at expected frequency, e.g., vehicle command messages

    Those hints would be helpful for most configurations but there may be exclusions. Defining metrics precisely requires an understanding of configuration.

    In addition, it is recommended that metrics be determined incrementally from the architectural level to the detailed design and implementation level. Mixing metrics at different levels of granularity can be confusing.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#list-of-sample-metrics","title":"List of sample metrics","text":"

    This section demonstrates how to define metrics according to the policies explained and has lists of the metrics for Autoware launched according to the tutorial. The section is divided into multiple subsections, each containing a model diagram and an accompanying list that explains the important temporal performance metrics. Each model is equipped with checkpoints that serve as indicators for these metrics.

    The first subsection presents the top-level temporal performance metrics, which are depicted in the abstract structure of Autoware as a whole. The detailed metrics are not included in the model as they would add complexity to it. Instead, the subsequent section introduces the detailed metrics. The detailed metrics are subject to more frequent updates compared to the top-level ones, which is another reason for categorizing them separately.

    Each list includes a column for the reference value. The reference value represents the observed value of each metric when Autoware is running according to the tutorial. It is important to note that the reference value is not a required value, meaning that Autoware does not necessarily fail in the tutorial execution if certain metrics do not fulfill the reference value.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#top-level-temporal-performance-metrics-for-autoware","title":"Top-level temporal performance metrics for Autoware","text":"

    The diagram below introduces the model for top-level temporal performance metrics.

    The following three policies assist in selecting the top-level performance metrics:

    Additionally, it is assumed that algorithms are implemented as multiple nodes and function as a pipeline processing system.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note AWOV-001 Message rate from CPA #9 to CPA #18 Update rate of result from Prediction to Planning. Perception 10 Hz Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. AWOV-002 Response time from CPA #0 to CPA #20 via CPA #18 Response time in main body of Perception. Perception N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is used if delay compensation is disabled in Tracking. AWOV-003 Response time from CPA #7 to CPA #20 Response time from Tracking output of Tracking to its data consumption in Planning. Perception N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is used if delay compensation is enabled in Tracking. AWOV-004 Response time from CPA #0 to CPA #6 Duration to process pointcloud data in Sensing and Detection. Perception N/A Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. The metric is used if delay compensation is enabled in Tracking. AWOV-005 Message rate from CPA #4 to CPA #5 Update rate of Detection result received by Tracking. Perception 10 Hz Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. AWOV-006 Response time from CPA #0 to CPA #14 Response time from output of observed data from LiDARs to its consumption in EKF Localizer via NDT Scan Matcher. Localization N/A EKF Localizer relies on fresh and up-to-date observed data from sensors for accurate estimation of self pose. AWOV-007 Message rate from CPA #11 to CPA #13 Update rate of pose estimated by NDT Scan Matcher. Localization 10 Hz EKF Localizer relies on fresh and up-to-date observed data from sensors for accurate estimation of self pose. 
AWOV-008 Message rate from CPA #15 to CPA #12 Update rate of feed backed pose estimated by EKF Localizer. Localization 50 Hz NDT Scan Matcher relies on receiving estimated pose from EKF Localizer smoothly for linear interpolation. AWOV-009 Message rate from CPA #17 to CPA #19 Update rate of Localization result received by Planning. Localization 50 Hz Planning relies on Localization to update the estimated pose frequently. AWOV-010 Response time from CPA #20 to CPA #23 Processing time from beginning of Planning to consumption of Trajectory message in Control. Planning N/A A vehicle relies on Planning to update trajectory within a short time frame to achieve safe driving behavior. AWOV-011 Message rate from CPA #21 to CPA #22 Update rate of Trajectory message from Planning. Planning 10 Hz A vehicle relies on Planning to update trajectory frequently to achieve safe driving behavior. AWOV-012 Message rate from CPA #24 to CPA #25 Update rate of Control command. Control 33 Hz Control stability and comfort relies on sampling frequency of Control. AWOV-013 Message rate between CPA #26 and Vehicle Communication rate between Autoware and Vehicle. Vehicle Interface N/A A vehicle requires Autoware to communicate with each other at predetermined frequency. Temporal performance requirement varies depending on vehicle type.

    Note

    There is an assumption that each of the sensors, such as LiDARs and cameras, outputs a set of pointcloud data with a timestamp. CPA #0 is observed with the timestamp. If the sensors are not configured to output the timestamp, the time when Autoware receives the pointcloud is used instead. That is represented by CPA #1 in the model. The detailed metrics employ the idea as well.

    "},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#detailed-temporal-performance-metrics-for-perception","title":"Detailed temporal performance metrics for Perception","text":"

    The diagram below introduces the model for temporal performance metrics for Perception.

    The following two policies assist in selecting the performance metrics:

    The following list shows the temporal performance metrics for Perception.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note APER-001 Message rate from CPP #2 to CPP #26 Update rate of Traffic Light Recognition. Traffic Light Recognition 10 Hz Planning relies on fresh and up-to-date perceived data from Traffic Light Recognition for making precise decisions. APER-002 Response time from CPP #0 to CPP #30 Response time from camera input to consumption of the result in Planning. Traffic Light Recognition N/A Planning relies on fresh and up-to-date perceived data from Traffic Light Recognition for making precise decisions. APER-003 Message rate from CPP #25 to CPP #28 Update rate of result from Prediction (Object Recognition) to Planning. Object Recognition 10 Hz Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-001. APER-004 Response time from CPP #6 to CPP #30 Response time from Tracking output of Tracking to its data consumption in Planning. Object Recognition N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-002 and used if delay compensation is disabled in Tracking. APER-005 Response time from CPP #23 to CPP #30 Response time from Tracking output of Tracking to its data consumption in Planning. Object Recognition N/A Planning relies on fresh and up-to-date perceived data from Perception for creating accurate trajectory. The metric is same as AWOV-003 and used if delay compensation is enabled in Tracking. APER-006 Response time from CPP #6 to CPP #21 Duration to process pointcloud data in Sensing and Detection. Object Recognition N/A Tracking relies on Detection to provide real-time and up-to-date perceived data. The metrics is same as AWOV-004 and used if delay compensation is enabled in Tracking. APER-007 Message rate from CPP #20 to CPP #21 Update rate of Detection result received by Tracking. 
Object Recognition 10 Hz Tracking relies on detection to provide real-time and up-to-date sensed data for accurate tracking. The metric is same as AWOV-005 APER-008 Message rate from CPP #14 to CPP #19 Update rate of data sent from Sensor Fusion. Object Recognition 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-009 Message rate from CPP #16 to CPP #19 Update rate of data sent from Detection by Tracker. Object Recognition 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-010 Message rate from CPP #18 to CPP #19 Update rate of data sent from Validation Object Recognition. 10 Hz Association Merger relies on the data to be updated at expected frequency for data synchronization. APER-011 Response time from CPP #6 to CPP #19 via CPP #14 Response time to consume data sent from Sensor Fusion after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-012 Response time from CPP #6 to CPP #19 via CPP #16 Response time to consume data sent from Detection by Tracker after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-013 Response time from CPP #6 to CPP #19 via CPP #18 Response time to consume data sent from Validator after LiDARs output pointcloud. Object Recognition N/A Association Merger relies on fresh and up-to-date data for data synchronization. APER-014 Message rate from CPP #10 to CPP #13 Update rate of data sent from Clustering. Object Recognition 10 Hz Sensor Fusion relies on the data to be updated at expected frequency for data synchronization. APER-015 Message rate from CPP #5 to CPP #13 Update rate of data sent from Camera-based Object detection. Object Recognition 10 Hz Sensor Fusion relies on the data to be updated at expected frequency for data synchronization. 
APER-016 Response time from CPP #6 to CPP #13 Response time to consume data sent from Clustering after LiDARs output pointcloud. Object Recognition N/A Sensor Fusion relies on fresh and up-to-date data for data synchronization. APER-017 Response time from CPP #3 to CPP #13 Response time to consume data sent from Camera-based Object detection after Cameras output images. Object Recognition N/A Sensor Fusion relies on fresh and up-to-date data for data synchronization. APER-018 Message rate from CPP #10 to CPP #17 Update rate of data sent from Clustering. Object Recognition 10 Hz Validator relies on the data to be updated at expected frequency for data synchronization. It seems similar to APER-014, but the topic message is different. APER-019 Message rate from CPP #12 to CPP #17 Update rate of data sent from DNN-based Object Recognition. Object Recognition 10 Hz Validator relies on the data to be updated at expected frequency for data synchronization. APER-020 Response time from CPP #6 to CPP #17 via CPP #10 Response time to consume data sent from Clustering after LiDARs output pointcloud. Object Recognition N/A Validator relies on fresh and update-date data for data synchronization. It seems similar to APER-015, but the topic message is different. APER-021 Response time from CPP #6 to CPP #17 via CPP #12 Response time to consume data sent from DNN-based Object Recognition after LiDARs output pointcloud. Object Recognition N/A Validator relies on fresh and update-date data for data synchronization."},{"location":"how-to-guides/others/defining-temporal-performance-metrics/#detailed-temporal-performance-metrics-for-paths-between-obstacle-segmentation-and-planning","title":"Detailed temporal performance metrics for Paths between Obstacle segmentation and Planning","text":"

    Obstacle segmentation, which is a crucial part of Perception, transmits data to Planning. The figure below illustrates the model that takes into account performance metrics related to Obstacle segmentation and Planning.

    Note

    Both the Obstacle grid map and Obstacle segmentation transmit data to multiple sub-components of Planning. However, not all of these sub-components are described in the model. This is because our primary focus is on the paths from LiDAR to Planning via Obstacle segmentation.

    The following list shows the temporal performance metrics around Obstacle segmentation and Planning.

    ID Representation in the model Metric meaning Related functionality Reference value Reason to choose it as a metric Note OSEG-001 Message rate from CPS #4 to CPS #7 Update rate of Occupancy grid map received by Planning (behavior_path_planner) Obstacle segmentation 10 Hz Planning relies on Occupancy grid map to be updated frequently and smoothly for creating accurate trajectory. OSEG-002 Response time from CPS #0 to CPS #9 via CPS #7 Response time to consume Occupancy grid map after LiDARs output sensing data. Obstacle segmentation N/A Planning relies on fresh and up-to-date perceived data from Occupancy grid map for creating accurate trajectory.. OSEG-003 Message rate from CPS #6 to CPS #11 Update rate of obstacle segmentation received by Planning (behavior_velocity_planner). Obstacle segmentation 10 Hz Planning relies on Obstacle segmentation to be updated frequently and smoothly for creating accurate trajectory. OSEG-004 Response time from CPS #0 to CPS #13 via CPS #11 Response time to consume Obstacle segmentation after LiDARs output sensing data. Obstacle segmentation N/A Planning relies on fresh and up-to-date perceived data from Obstacle segmentation for creating accurate trajectory.."},{"location":"how-to-guides/others/determining-component-dependencies/","title":"Determining component dependencies","text":""},{"location":"how-to-guides/others/determining-component-dependencies/#determining-component-dependencies","title":"Determining component dependencies","text":"

    For any developers who wish to try and deploy Autoware as a microservices architecture, it is necessary to understand the software dependencies, communication, and implemented features of each ROS package/node.

    As an example, the commands necessary to determine the dependencies for the Perception component are shown below.

    "},{"location":"how-to-guides/others/determining-component-dependencies/#perception-component-dependencies","title":"Perception component dependencies","text":"

    To generate a graph of package dependencies, use the following colcon command:

    colcon graph --dot --packages-up-to tier4_perception_launch | dot -Tpng -o graph.png\n

    To generate a list of dependencies, use:

    colcon list --packages-up-to tier4_perception_launch --names-only\n
    colcon list output
    autoware_auto_geometry_msgs\nautoware_auto_mapping_msgs\nautoware_auto_perception_msgs\nautoware_auto_planning_msgs\nautoware_auto_vehicle_msgs\nautoware_cmake\nautoware_lint_common\nautoware_point_types\ncompare_map_segmentation\ndetected_object_feature_remover\ndetected_object_validation\ndetection_by_tracker\neuclidean_cluster\ngrid_map_cmake_helpers\ngrid_map_core\ngrid_map_cv\ngrid_map_msgs\ngrid_map_pcl\ngrid_map_ros\nground_segmentation\nimage_projection_based_fusion\nimage_transport_decompressor\ninterpolation\nkalman_filter\nlanelet2_extension\nlidar_apollo_instance_segmentation\nmap_based_prediction\nmulti_object_tracker\nmussp\nobject_merger\nobject_range_splitter\noccupancy_grid_map_outlier_filter\npointcloud_preprocessor\npointcloud_to_laserscan\nshape_estimation\ntensorrt_yolo\ntier4_autoware_utils\ntier4_debug_msgs\ntier4_pcl_extensions\ntier4_perception_launch\ntier4_perception_msgs\ntraffic_light_classifier\ntraffic_light_map_based_detector\ntraffic_light_ssd_fine_detector\ntraffic_light_visualization\nvehicle_info_util\n

    Tip

    To output a list of modules with their respective paths, run the command above without the --names-only parameter.

    To see which ROS topics are being subscribed and published to, use rqt_graph as follows:

    ros2 launch tier4_perception_launch perception.launch.xml mode:=lidar\nros2 run rqt_graph rqt_graph\n
    "},{"location":"how-to-guides/others/fixing-dependent-package-versions/","title":"Fixing dependent package versions","text":""},{"location":"how-to-guides/others/fixing-dependent-package-versions/#fixing-dependent-package-versions","title":"Fixing dependent package versions","text":"

    Autoware manages dependent package versions in autoware.repos. For example, let's say you make a branch in autoware.universe and add new features. Suppose you update other dependencies with vcs pull after cutting a branch from autoware.universe. Then the version of autoware.universe you are developing and other dependencies will become inconsistent, and the entire Autoware build will fail. We recommend saving the dependent package versions by executing the following command when starting the development.

    vcs export src --exact > my_autoware.repos\n
    "},{"location":"how-to-guides/others/reducing-start-delays/","title":"Reducing start delays on real vehicles","text":""},{"location":"how-to-guides/others/reducing-start-delays/#reducing-start-delays-on-real-vehicles","title":"Reducing start delays on real vehicles","text":"

    In simulation, the ego vehicle reacts nearly instantly to the control commands generated by Autoware. However, with a real vehicle, some delays occur that may make the ego vehicle feel less responsive.

    This page presents start delays experienced when using Autoware on a real vehicle. We define the start delay as the time between (a) when Autoware decides to make the ego vehicle start and (b) when the vehicle actually starts moving. More precisely:

    "},{"location":"how-to-guides/others/reducing-start-delays/#start-delay-with-manual-driving","title":"Start delay with manual driving","text":"

    First, let us look at the start delay when a human is driving.

    The following figure shows the start delay when a human driver switches the gear from parked to drive and instantly releases the brake to push the throttle pedal and make the velocity of the vehicle increase.

    There are multiple things to note from this figure.

    "},{"location":"how-to-guides/others/reducing-start-delays/#filter-delay","title":"Filter delay","text":"

    To guarantee passenger comfort, some Autoware modules implement filters on the jerk of the vehicle, preventing sudden changes in acceleration.

    For example, the vehicle_cmd_gate filters the acceleration command generated by the controller and was previously introducing significant delays when transitioning between a stop command where the acceleration is negative, and a move command where the acceleration is positive. Because of the jerk filter, the transition between negative and positive was not instantaneous and would take several hundreds of milliseconds.

    "},{"location":"how-to-guides/others/reducing-start-delays/#gear-delay","title":"Gear delay","text":"

    In many vehicles, it is necessary to change gear before first starting to move the vehicle. When performed autonomously, this gear change can take some significant time. Moreover, as seen from the data recorded with manual driving, the measured gear value may be delayed.

    In Autoware, the controller sends a stopping control command until the gear is changed to the drive state. This means that delays in the gear change and its reported value can greatly impact the start delay. Note that this is only an issue when the vehicle is initially in the parked gear.

    The only way to reduce this delay is by tuning the vehicle to increase the gear change speed or to reduce the delay in the gear change report.

    "},{"location":"how-to-guides/others/reducing-start-delays/#brake-delay","title":"Brake delay","text":"

    In vehicles with a brake pedal, the braking system will often be made of several moving parts which cannot move instantly. Thus, when Autoware sends brake commands to a vehicle, some delays should be expected in the actual brake applied to the wheels.

    This lingering brake may prevent or delay the initial motion of the ego vehicle.

    This delay can be reduced by tuning the vehicle.

    "},{"location":"how-to-guides/others/reducing-start-delays/#throttle-response","title":"Throttle response","text":"

    For vehicles with throttle control, one of the main causes of start delays is the throttle response of the vehicle. When pushing the throttle pedal, the wheels of the vehicle do not instantly start rotating. This is partly due to the inertia of the vehicle, but also to the motor, which may take a significant time to start applying some torque to the wheels.

    It may be possible to tune some vehicle side parameters to reduce this delay, but it is often done at the cost of reduced energy efficiency.

    On the Autoware side, the only way to decrease this delay is to increase the initial throttle but this can cause uncomfortably high initial accelerations.

    "},{"location":"how-to-guides/others/reducing-start-delays/#initial-acceleration-and-throttle","title":"Initial acceleration and throttle","text":"

    As we just discussed, for vehicles with throttle control, an increased initial throttle value can reduce the start delay.

    Since Autoware outputs an acceleration value, the conversion module raw_vehicle_cmd_converter is used to map the acceleration value from Autoware to a throttle value to be sent to the vehicle. Such mapping is usually calibrated automatically using the accel_brake_map_calibrator module, but it may produce a low initial throttle which leads to high start delays.

    In order to increase the initial throttle, there are two options: increase the initial acceleration output by Autoware, or modify the acceleration to throttle mapping.

    The initial acceleration output by Autoware can be tuned in the motion_velocity_smoother with parameters engage_velocity and engage_acceleration. However, the vehicle_cmd_gate applies a filter on the control command to prevent too sudden changes in jerk and acceleration, limiting the maximum allowed acceleration while the ego vehicle is stopped.

    Alternatively, the mapping of acceleration can be tuned to increase the throttle corresponding to the initial acceleration. If we look at an example acceleration map, it does the following conversion: when the ego velocity is 0 (first column), acceleration values between 0.631 (first row) and 0.836 (second row) are converted to a throttle between 0% and 10%. This means that any initial acceleration below 0.631m/s\u00b2 will not produce any throttle. Keep in mind that after tuning the acceleration map, it may be necessary to also update the brake map.

    default 0 1.39 2.78 4.17 5.56 6.94 8.33 9.72 11.11 12.5 13.89 0 0.631 0.11 -0.04 -0.04 -0.041 -0.096 -0.137 -0.178 -0.234 -0.322 -0.456 0.1 0.836 0.57 0.379 0.17 0.08 0.07 0.068 0.027 -0.03 -0.117 -0.251 0.2 1.129 0.863 0.672 0.542 0.4 0.38 0.361 0.32 0.263 0.176 0.042 0.3 1.559 1.293 1.102 0.972 0.887 0.832 0.791 0.75 0.694 0.606 0.472 0.4 2.176 1.909 1.718 1.588 1.503 1.448 1.408 1.367 1.31 1.222 1.089 0.5 3.027 2.76 2.57 2.439 2.354 2.299 2.259 2.218 2.161 2.074 1.94"},{"location":"how-to-guides/others/running-autoware-without-cuda/","title":"Running Autoware without CUDA","text":""},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-autoware-without-cuda","title":"Running Autoware without CUDA","text":"

    Although CUDA installation is recommended to achieve better performance for object detection and traffic light recognition in Autoware Universe, it is possible to run these algorithms without CUDA. The following subsections briefly explain how to run each algorithm in such an environment.

    "},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-2d3d-object-detection-without-cuda","title":"Running 2D/3D object detection without CUDA","text":"

    Autoware Universe's object detection can be run using one of five possible configurations:

    Of these five configurations, only the last one (euclidean_cluster) can be run without CUDA. For more details, refer to the euclidean_cluster module's README file.

    "},{"location":"how-to-guides/others/running-autoware-without-cuda/#running-traffic-light-detection-without-cuda","title":"Running traffic light detection without CUDA","text":"

    For traffic light recognition (both detection and classification), there are two modules that require CUDA:

    To run traffic light detection without CUDA, set enable_fine_detection to false in the traffic light launch file. Doing so disables the traffic_light_ssd_fine_detector such that traffic light detection is handled by the map_based_traffic_light_detector module instead.

    To run traffic light classification without CUDA, set use_gpu to false in the traffic light classifier launch file. Doing so will force the traffic_light_classifier to use a different classification algorithm that does not require CUDA or a GPU.

    "},{"location":"how-to-guides/training-machine-learning-models/training-models/","title":"Training and Deploying Models","text":""},{"location":"how-to-guides/training-machine-learning-models/training-models/#training-and-deploying-models","title":"Training and Deploying Models","text":""},{"location":"how-to-guides/training-machine-learning-models/training-models/#overview","title":"Overview","text":"

    The Autoware offers a comprehensive array of machine learning models, tailored for a wide range of tasks including 2D and 3D object detection, traffic light recognition and more. These models have been meticulously trained utilizing open-mmlab's extensive repositories. By leveraging the provided scripts and following the training steps, you have the capability to train these models using your own dataset, tailoring them to your specific needs.

    Furthermore, you will find the essential conversion scripts to deploy your trained models into Autoware using the mmdeploy repository.

    "},{"location":"how-to-guides/training-machine-learning-models/training-models/#training-traffic-light-classifier-model","title":"Training traffic light classifier model","text":"

    The traffic light classifier model within the Autoware has been trained using the mmlab/pretrained repository. The Autoware offers pretrained models based on EfficientNet-b1 and MobileNet-v2 architectures. To fine-tune these models, a total of 83,400 images were employed, comprising 58,600 for training, 14,800 for evaluation, and 10,000 for testing. These images represent Japanese traffic lights and were trained using TIER IV's internal dataset.

    Name Input Size Test Accuracy EfficientNet-b1 128 x 128 99.76% MobileNet-v2 224 x 224 99.81%

    Comprehensive training instructions for the traffic light classifier model are detailed within the readme file accompanying \"traffic_light_classifier\" package. These instructions will guide you through the process of training the model using your own dataset. To facilitate your training, we have also provided an example dataset containing three distinct classes (green, yellow, red), which you can leverage during the training process.

    Detailed instructions for training the traffic light classifier model can be found here.

    "},{"location":"installation/","title":"Installation","text":""},{"location":"installation/#installation","title":"Installation","text":""},{"location":"installation/#target-platforms","title":"Target platforms","text":"

    Autoware targets the platforms listed below. It may change in future versions of Autoware.

    The Autoware Foundation provides no support on other platforms than those listed below.

    "},{"location":"installation/#architecture","title":"Architecture","text":""},{"location":"installation/#minimum-hardware-requirements","title":"Minimum hardware requirements","text":"

    Info

    Autoware is scalable and can be customized to work with distributed or less powerful hardware. The minimum hardware requirements given below are just a general recommendation. However, performance will be improved with more cores, RAM and a higher-spec graphics card or GPU core.

    Although GPU is not required to run basic functionality, it is mandatory to enable the following neural network related functions:

    For details of how to enable object detection and traffic light detection/classification without a GPU, refer to the Running Autoware without CUDA.

    "},{"location":"installation/#installing-autoware","title":"Installing Autoware","text":"

    There are two ways to set up Autoware. Choose one according to your preference.

    If any issues occur during installation, refer to the Support page.

    "},{"location":"installation/#1-docker-installation","title":"1. Docker installation","text":"

    Docker can ensure that all developers in a project have a common, consistent development environment. It is recommended for beginners, casual users, and people who are unfamiliar with Ubuntu.

    For more information, refer to the Docker installation guide.

    "},{"location":"installation/#2-source-installation","title":"2. Source installation","text":"

    Source installation is for the cases where more granular control of the installation environment is needed. It is recommended for experienced users or people who want to customize their environment. Note that some problems may occur depending on your local environment.

    For more information, refer to the source installation guide.

    "},{"location":"installation/#installing-related-tools","title":"Installing related tools","text":"

    Some other tools are required depending on the evaluation you want to do. For example, to run an end-to-end simulation you need to install an appropriate simulator.

    For more information, see here.

    "},{"location":"installation/#additional-settings-for-developers","title":"Additional settings for developers","text":"

    There are also tools and settings for developers, such as Shells or IDEs.

    For more information, see here.

    "},{"location":"installation/additional-settings-for-developers/","title":"Additional settings for developers","text":""},{"location":"installation/additional-settings-for-developers/#additional-settings-for-developers","title":"Additional settings for developers","text":""},{"location":"installation/additional-settings-for-developers/#console-settings-for-ros-2","title":"Console settings for ROS 2","text":""},{"location":"installation/additional-settings-for-developers/#colorizing-logger-output","title":"Colorizing logger output","text":"

    By default, ROS 2 logger doesn't colorize the output. To colorize it, write the following in your .bashrc:

    export RCUTILS_COLORIZED_OUTPUT=1\n
    "},{"location":"installation/additional-settings-for-developers/#customizing-the-format-of-logger-output","title":"Customizing the format of logger output","text":"

    By default, ROS 2 logger doesn't output detailed information such as file name, function name, or line number. To customize it, write the following in your .bashrc:

    export RCUTILS_CONSOLE_OUTPUT_FORMAT=\"[{severity} {time}] [{name}]: {message} ({function_name}() at {file_name}:{line_number})\"\n

    For more options, see here.

    "},{"location":"installation/additional-settings-for-developers/#network-settings-for-ros-2","title":"Network settings for ROS 2","text":"

    ROS 2 employs DDS, and the configuration of ROS 2 and DDS is described separately. For ROS 2 networking concepts, refer to the official documentation.

    "},{"location":"installation/additional-settings-for-developers/#ros-2-network-setting","title":"ROS 2 network setting","text":"

    ROS 2 multicasts data on the local network by default. Therefore, when you develop in an office, the data flows over the local network of your office. It may cause collisions of packets or increases in network traffic.

    To avoid these, there are two options.

    Unless you plan to use multiple host computers on the local network, localhost-only communication is recommended. For details, refer to the sections below.

    "},{"location":"installation/additional-settings-for-developers/#enabling-localhost-only-communication","title":"Enabling localhost-only communication","text":"

    Write the following in your .bashrc: For more information, see the ROS 2 documentation.

    export ROS_LOCALHOST_ONLY=1\n

    If you export ROS_LOCALHOST_ONLY=1, MULTICAST must be enabled at the loopback address. To verify that MULTICAST is enabled, use the following command.

    $ ip link show lo\n1: lo: <LOOPBACK,MULTICAST,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN mode DEFAULT group default qlen 1000\n

    If the word MULTICAST is not printed, use the following command to enable it.

    sudo ip link set lo multicast on\n
    "},{"location":"installation/additional-settings-for-developers/#same-domain-only-communication-on-the-local-network","title":"Same domain only communication on the local network","text":"

    ROS 2 uses ROS_DOMAIN_ID to create groups and communicate between machines in the groups. Since all ROS 2 nodes use domain ID 0 by default, it may cause unintended interference.

    To avoid it, set a different domain ID for each group in your .bashrc:

    # Replace X with the Domain ID you want to use\n# Domain ID should be a number in range [0, 101] (inclusive)\nexport ROS_DOMAIN_ID=X\n

    Also confirm that ROS_LOCALHOST_ONLY is 0 by using the following command.

    echo $ROS_LOCALHOST_ONLY # If the output is 1, localhost has priority.\n

    For more information, see the ROS 2 Documentation.

    "},{"location":"installation/additional-settings-for-developers/#dds-settings","title":"DDS settings","text":"

    Autoware uses DDS for inter-node communication. The ROS 2 documentation recommends that users tune DDS to utilize its full capability. In particular, the receive buffer size is a critical parameter for Autoware. If the parameter is not large enough, Autoware will fail to receive large data such as point clouds or images.

    "},{"location":"installation/additional-settings-for-developers/#tuning-dds","title":"Tuning DDS","text":"

    Unless customized, CycloneDDS is adopted by default. For example, to execute Autoware with CycloneDDS, prepare a config file. A sample config file is given below. Save it as cyclonedds_config.xml.

    <?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<CycloneDDS xmlns=\"https://cdds.io/config\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"https://cdds.io/config https://raw.githubusercontent.com/eclipse-cyclonedds/cyclonedds/master/etc/cyclonedds.xsd\">\n<Domain Id=\"any\">\n<General>\n<Interfaces>\n<NetworkInterface autodetermine=\"true\" priority=\"default\" multicast=\"default\" />\n</Interfaces>\n<AllowMulticast>default</AllowMulticast>\n<MaxMessageSize>65500B</MaxMessageSize>\n</General>\n<Internal>\n<SocketReceiveBufferSize min=\"10MB\"/>\n<Watermarks>\n<WhcHigh>500kB</WhcHigh>\n</Watermarks>\n</Internal>\n</Domain>\n</CycloneDDS>\n

    This configuration is mostly taken from Eclipse Cyclone DDS:Run-time configuration documentation. You can see why each value is set as such under the documentation link.

    Set the config file path and enlarge the Linux kernel maximum buffer size before launching Autoware.

    export CYCLONEDDS_URI=file:///absolute/path/to/cyclonedds_config.xml\nsudo sysctl -w net.core.rmem_max=2147483647\n

    For more information, refer to the ROS 2 documentation. Reading the user guide for the chosen DDS is helpful for a deeper understanding.

    "},{"location":"installation/additional-settings-for-developers/#tuning-dds-for-multiple-host-computers-for-advanced-users","title":"Tuning DDS for multiple host computers (for advanced users)","text":"

    When Autoware runs on multiple host computers, IP Fragmentation should be taken into account. As ROS 2 documentation recommends, parameters for IP Fragmentation should be set as shown in the following example.

    sudo sysctl -w net.ipv4.ipfrag_time=3\nsudo sysctl -w net.ipv4.ipfrag_high_thresh=134217728     # (128 MB)\n
    "},{"location":"installation/autoware/docker-installation-devel/","title":"Docker installation for development","text":""},{"location":"installation/autoware/docker-installation-devel/#docker-installation-for-development","title":"Docker installation for development","text":""},{"location":"installation/autoware/docker-installation-devel/#prerequisites","title":"Prerequisites","text":" "},{"location":"installation/autoware/docker-installation-devel/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Clone autowarefoundation/autoware and move to the directory.

      git clone https://github.com/autowarefoundation/autoware.git\ncd autoware\n
    2. You can install the dependencies either manually or using the provided Ansible script.

    Note: Before installing NVIDIA libraries, confirm and agree with the licenses.

    "},{"location":"installation/autoware/docker-installation-devel/#installing-dependencies-manually","title":"Installing dependencies manually","text":""},{"location":"installation/autoware/docker-installation-devel/#installing-dependencies-using-ansible","title":"Installing dependencies using Ansible","text":"

    Be very careful with this method. Make sure you read and confirmed all the steps in the Ansible configuration before using it.

    If you've manually installed the dependencies, you can skip this section.

    ./setup-dev-env.sh docker\n

    You might need to log out and log back in so that the current user is able to use Docker.

    "},{"location":"installation/autoware/docker-installation-devel/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"

    Warning

    Before proceeding, confirm and agree with the NVIDIA Deep Learning Container license. By pulling and using the Autoware Universe images, you accept the terms and conditions of the license.

    1. Create the autoware_map directory for map data later.

      mkdir ~/autoware_map\n
    2. Pull the Docker image

      docker pull ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    3. Launch a Docker container.

      • For amd64 architecture computers with NVIDIA GPU:

        rocker --nvidia --x11 --user --volume $HOME/autoware --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
      • If you want to run container without using NVIDIA GPU, or for arm64 architecture computers:

        rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n

        The detailed reason can be found here

      For more advanced usage, see here.

      After that, move to the workspace in the container:

      cd autoware\n
    4. Create the src directory and clone repositories into it.

      mkdir src\nvcs import src < autoware.repos\n
    5. Update dependent ROS packages.

      The dependency of Autoware may change after the Docker image was created. In that case, you need to run the following commands to update the dependency.

      sudo apt update\nrosdep update\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    6. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

      If there is any build issue, refer to Troubleshooting.

    "},{"location":"installation/autoware/docker-installation-devel/#how-to-update-a-workspace","title":"How to update a workspace","text":"
    1. Update the Docker image.

      docker pull ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    2. Launch a Docker container.

      • For amd64 architecture computers:

        rocker --nvidia --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
      • If you want to run container without using NVIDIA GPU, or for arm64 architecture computers:

        rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n
    3. Update the .repos file.

      cd autoware\ngit pull\n
    4. Update the repositories.

      vcs import src < autoware.repos\nvcs pull src\n
    5. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"installation/autoware/docker-installation-prebuilt/","title":"Docker installation for quick start","text":""},{"location":"installation/autoware/docker-installation-prebuilt/#docker-installation-for-quick-start","title":"Docker installation for quick start","text":""},{"location":"installation/autoware/docker-installation-prebuilt/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Installing dependencies manually

      • Install Docker Engine
      • Install NVIDIA Container Toolkit
      • Install rocker
    "},{"location":"installation/autoware/docker-installation-prebuilt/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"
    1. Create the autoware_map directory for map data later.

      mkdir ~/autoware_map\n
    2. Launch a Docker container.

      rocker --nvidia --x11 --user --volume $HOME/autoware_map -- ghcr.io/autowarefoundation/autoware-universe:humble-latest-prebuilt\n

      For more advanced usage, see here.

    3. Run Autoware simulator

      Inside the container, you can run the Autoware simulation by following this tutorial:

      planning simulation

      rosbag replay simulation.

    "},{"location":"installation/autoware/docker-installation/","title":"Docker installation","text":""},{"location":"installation/autoware/docker-installation/#docker-installation","title":"Docker installation","text":"

    Info

    Since this page explains Docker-specific information, it is recommended to see Source installation as well if you need detailed information.

    Here are two ways to install Autoware by docker:

    "},{"location":"installation/autoware/docker-installation/#docker-installation-for-quick-start","title":"Docker installation for quick start","text":"

    docker installation for quick start

    "},{"location":"installation/autoware/docker-installation/#docker-installation-for-development","title":"Docker installation for development","text":"

    docker installation for development

    "},{"location":"installation/autoware/docker-installation/#troubleshooting","title":"Troubleshooting","text":"

    Here are solutions for a few specific errors:

    "},{"location":"installation/autoware/docker-installation/#cuda-error-forward-compatibility-was-attempted-on-non-supported-hw","title":"cuda error: forward compatibility was attempted on non supported hw","text":"

    When starting Docker with GPU support enabled for NVIDIA graphics, you may sometimes receive the following error:

    docker: Error response from daemon: OCI runtime create failed: container_linux.go:349: starting container process caused \"process_linux.go:449: container init caused \\\"process_linux.go:432: running prestart hook 0 caused \\\\\\\"error running hook: exit status 1, stdout: , stderr: nvidia-container-cli: initialization error: cuda error: forward compatibility was attempted on non supported hw\\\\\\\\n\\\\\\\"\\\"\": unknown.\nERROR: Command return non-zero exit code (see above): 125\n

    This usually indicates that a new NVIDIA graphics driver has been installed (usually via apt) but the system has not yet been restarted. A similar message may appear if the graphics driver is not available, for example because of resuming after suspend.

    To fix this, restart your system after installing the new NVIDIA driver.

    "},{"location":"installation/autoware/docker-installation/#docker-with-nvidia-gpu-fails-to-start-autoware-on-arm64-devices","title":"Docker with NVIDIA gpu fails to start Autoware on arm64 devices","text":"

    When starting Docker with GPU support enabled for NVIDIA graphics on arm64 devices, e.g. NVIDIA jetson AGX xavier, you may receive the following error:

    nvidia@xavier:~$ rocker --nvidia --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda-arm64\n...\n\nCollecting staticx==0.12.3\nDownloading https://files.pythonhosted.org/packages/92/ff/d9960ea1f9db48d6044a24ee0f3d78d07bcaddf96eb0c0e8806f941fb7d3/staticx-0.12.3.tar.gz (68kB)\nComplete output from command python setup.py egg_info:\nTraceback (most recent call last):\nFile \"\", line 1, in\nFile \"/tmp/pip-install-m_nm8mya/staticx/setup.py\", line 4, in\nfrom wheel.bdist_wheel import bdist_wheel\nModuleNotFoundError: No module named 'wheel'\n\nCommand \"python setup.py egg_info\" failed with error code 1 in /tmp/pip-install-m_nm8mya/staticx/\n...\n

    This error exists in current version of rocker tool, which relates to the os_detection function of rocker.

    To fix this error, temporary modification of rocker source code is required, which is not recommended.

    At current stage, it is recommended to run docker without NVIDIA gpu enabled for arm64 devices:

    rocker -e LIBGL_ALWAYS_SOFTWARE=1 --x11 --user --volume $HOME/autoware -- ghcr.io/autowarefoundation/autoware-universe:latest-cuda\n

    This tutorial will be updated after official fix from rocker.

    "},{"location":"installation/autoware/docker-installation/#tips","title":"Tips","text":""},{"location":"installation/autoware/docker-installation/#non-native-arm64-system","title":"Non-native arm64 System","text":"

    This section describes a process to run arm64 systems on amd64 systems using qemu-user-static.

    Initially, your system is usually incompatible with arm64 systems. To check that:

    $ docker run --rm -t arm64v8/ubuntu uname -m\nWARNING: The requested image's platform (linux/arm64/v8) does not match the detected host platform (linux/amd64) and no specific platform was requested\nstandard_init_linux.go:228: exec user process caused: exec format error\n

    Installing qemu-user-static enables us to run arm64 images on amd64 systems.

    $ sudo apt-get install qemu-user-static\n$ docker run --rm --privileged multiarch/qemu-user-static --reset -p yes\n$ docker run --rm -t arm64v8/ubuntu uname -m\nWARNING: The requested image's platform (linux/arm64/v8) does not match the detected host platform (linux/amd64) and no specific platform was requested\naarch64\n

    To run Autoware's Docker images of arm64 architecture, add the suffix -arm64.

    $ docker run --rm -it ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda-arm64\nWARNING: The requested image's platform (linux/arm64) does not match the detected host platform (linux/amd64) and no specific platform was requested\nroot@5b71391ad50f:/autoware#\n
    "},{"location":"installation/autoware/source-installation/","title":"Source installation","text":""},{"location":"installation/autoware/source-installation/#source-installation","title":"Source installation","text":""},{"location":"installation/autoware/source-installation/#prerequisites","title":"Prerequisites","text":"
    sudo apt-get -y update\nsudo apt-get -y install git\n

    Note: If you wish to use ROS 2 Galactic on Ubuntu 20.04, refer to installation instruction from galactic branch, but be aware that Galactic version of Autoware might not have latest features.

    "},{"location":"installation/autoware/source-installation/#how-to-set-up-a-development-environment","title":"How to set up a development environment","text":"
    1. Clone autowarefoundation/autoware and move to the directory.

      git clone https://github.com/autowarefoundation/autoware.git\ncd autoware\n
    2. If you are installing Autoware for the first time, you can automatically install the dependencies by using the provided Ansible script.

      ./setup-dev-env.sh\n

      If you encounter any build issues, please consult the Troubleshooting section for assistance.

    Info

    Before installing NVIDIA libraries, please ensure that you have reviewed and agreed to the licenses.

    Note

    The following items will be automatically installed. If the ansible script doesn't work or if you already have different versions of dependent libraries installed, please install the following items manually.

    If you didn't use ansible script you will need to download some package artifacts as explained in Manual loading of artifacts. Otherwise some packages (mostly from perception) will not be able to run as they need these artifacts for the inference.

    "},{"location":"installation/autoware/source-installation/#how-to-set-up-a-workspace","title":"How to set up a workspace","text":"
    1. Create the src directory and clone repositories into it.

      Autoware uses vcstool to construct workspaces.

      cd autoware\nmkdir src\nvcs import src < autoware.repos\n
    2. Install dependent ROS packages.

      Autoware requires some ROS 2 packages in addition to the core components. The tool rosdep allows an automatic search and installation of such dependencies. You might need to run rosdep update before rosdep install.

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    3. Build the workspace.

      Autoware uses colcon to build workspaces. For more advanced options, refer to the documentation.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

      If there is any build issue, refer to Troubleshooting.

    "},{"location":"installation/autoware/source-installation/#how-to-update-a-workspace","title":"How to update a workspace","text":"
    1. Update the .repos file.

      cd autoware\ngit pull <remote> <your branch>\n

      <remote> is usually git@github.com:autowarefoundation/autoware.git

    2. Update the repositories.

      vcs import src < autoware.repos\nvcs pull src\n

      For Git users:

      • vcs import is similar to git checkout.
        • Note that it doesn't pull from the remote.
      • vcs pull is similar to git pull.
        • Note that it doesn't switch branches.

      For more information, refer to the official documentation.

    3. Install dependent ROS packages.

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    4. Build the workspace.

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"installation/related-tools/","title":"Installation of related tools","text":""},{"location":"installation/related-tools/#installation-of-related-tools","title":"Installation of related tools","text":"

    Warning

    Under Construction

    "},{"location":"models/","title":"Machine learning models","text":""},{"location":"models/#machine-learning-models","title":"Machine learning models","text":"

    The Autoware perception stack uses models for inference. These models are automatically downloaded if using ansible, but they can also be downloaded manually.

    "},{"location":"models/#onnx-model-files","title":"ONNX model files","text":""},{"location":"models/#download-instructions","title":"Download instructions","text":"

    The ONNX model files are stored in a common location, hosted by Web.Auto

    Any tool that can download files from the web (e.g. wget or curl) is the only requirement for downloading these files:

    # yabloc_pose_initializer\n\n$ mkdir -p ~/autoware_data/yabloc_pose_initializer/\n$ wget -P ~/autoware_data/yabloc_pose_initializer/ \\\nhttps://s3.ap-northeast-2.wasabisys.com/pinto-model-zoo/136_road-segmentation-adas-0001/resources.tar.gz\n\n\n# image_projection_based_fusion\n\n$ mkdir -p ~/autoware_data/image_projection_based_fusion/\n$ wget -P ~/autoware_data/image_projection_based_fusion/ \\\nhttps://awf.ml.dev.web.auto/perception/models/pointpainting/v4/pts_voxel_encoder_pointpainting.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/pointpainting/v4/pts_backbone_neck_head_pointpainting.onnx\n\n\n# lidar_apollo_instance_segmentation\n\n$ mkdir -p ~/autoware_data/lidar_apollo_instance_segmentation/\n$ wget -P ~/autoware_data/lidar_apollo_instance_segmentation/ \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/vlp-16.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/hdl-64.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/lidar_apollo_instance_segmentation/vls-128.onnx\n\n\n# lidar_centerpoint\n\n$ mkdir -p ~/autoware_data/lidar_centerpoint/\n$ wget -P ~/autoware_data/lidar_centerpoint/ \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_voxel_encoder_centerpoint_tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/centerpoint/v2/pts_backbone_neck_head_centerpoint_tiny.onnx\n\n\n# tensorrt_yolo\n\n$ mkdir -p ~/autoware_data/tensorrt_yolo/\n$ wget -P ~/autoware_data/tensorrt_yolo/ \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov3.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov4-tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5s.onnx 
\\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5m.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5l.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolov5x.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/coco.names\n\n\n# tensorrt_yolox\n\n$ mkdir -p ~/autoware_data/tensorrt_yolox/\n$ wget -P ~/autoware_data/tensorrt_yolox/ \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-tiny.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-sPlus-opt.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/yolox-sPlus-opt.EntropyV2-calibration.table \\\nhttps://awf.ml.dev.web.auto/perception/models/object_detection_yolox_s/v1/yolox-sPlus-T4-960x960-pseudo-finetune.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/object_detection_yolox_s/v1/yolox-sPlus-T4-960x960-pseudo-finetune.EntropyV2-calibration.table \\\nhttps://awf.ml.dev.web.auto/perception/models/label.txt\n\n\n# traffic_light_classifier\n\n$ mkdir -p ~/autoware_data/traffic_light_classifier/\n$ wget -P ~/autoware_data/traffic_light_classifier/ \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_mobilenetv2_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/traffic_light_classifier_efficientNet_b1_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/traffic_light_classifier/v2/lamp_labels.txt\n\n\n# traffic_light_fine_detector\n\n$ mkdir -p 
~/autoware_data/traffic_light_fine_detector/\n$ wget -P ~/autoware_data/traffic_light_fine_detector/ \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_1.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_4.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_yolox_s_batch_6.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/tlr_yolox_s/v2/tlr_labels.txt\n\n\n# traffic_light_ssd_fine_detector\n\n$ mkdir -p ~/autoware_data/traffic_light_ssd_fine_detector/\n$ wget -P ~/autoware_data/traffic_light_ssd_fine_detector/ \\\nhttps://awf.ml.dev.web.auto/perception/models/mb2-ssd-lite-tlr.onnx \\\nhttps://awf.ml.dev.web.auto/perception/models/voc_labels_tl.txt\n
    "},{"location":"reference-hw/","title":"Reference HW Design","text":""},{"location":"reference-hw/#reference-hw-design","title":"Reference HW Design","text":"

    This document is created to describe and give additional information about the sensors and systems supported by Autoware.Auto software.

    All equipment listed in this document has available ROS 2 drivers and has been tested by one or more of the community members in the field in autonomous vehicle and robotics applications.

    The listed sensors and systems are not sold, developed or given direct technical support by the Autoware community. Having said that, any ROS 2 and Autoware related issue regarding hardware usage can be raised by following the community guidelines, which can be found here.

    The document consists of the sections listed below:

    "},{"location":"reference-hw/ad-computers/","title":"AD Computers","text":""},{"location":"reference-hw/ad-computers/#ad-computers","title":"AD Computers","text":""},{"location":"reference-hw/ad-computers/#adlink-in-vehicle-computers","title":"ADLINK In-Vehicle Computers","text":"

    ADLINK solutions which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) AVA-351001 Intel\u00ae Xeon\u00ae E-2278GE Dual RTX 5000 64GB RAM,CAN, USB, 10G Ethernet, DIO, Hot-Swap SSD, USIM 9~36 VDC, MIL-STD-810H,ISO 7637-2 & SAE 113-11 Y SOAFEE\u2019s AVA Developer Platform Ampere Altra ARMv8 optional USB, Ethernet, DIO, M.2 NVMe SSDs 110/220 AC Y RQX-58G Carmel ARMv8.2 2.26GHz Nvidia Jetson AGX Xavier USB, Ethernet, M.2 NVME SSD, CAN, USIM, GMSL2 Camera support 9~36VDC Y RQX-59G 8-core Arm\u00ae Cortex\u00ae-A78AE v8.2 Nvidia Jetson AGX Orin USB, Ethernet, M.2 NVME SSD, CAN, USIM, GMSL2 Camera support 9~36VDC N SOAFEE\u2019s AVA AP1 Ampere Altra ARMv8 optional CAN, USB, Ethernet, DIO, M.2 NVMe SSDs 12 Volt Y

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#nxp-in-vehicle-computers","title":"NXP In-Vehicle Computers","text":"

    NXP solutions which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) BLUEBOX 3.0 16 x Arm\u00ae Cortex\u00ae-A72 Dual RTX 8000 or RTX A6000 16 GB RAM CAN, FlexRay, USB, Ethernet, DIO, SSD ASIL-D -

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#neousys-in-vehicle-computers","title":"Neousys In-Vehicle Computers","text":"

    Neousys solutions which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) 8208-GC Intel\u00ae Xeon\u00ae E-2278GE Dual RTX 2080ti or RTX 3070 128 GB RAM,CAN, USB, Ethernet, Serial, Hot-Swap SSD 8-35 VoltVibration:MIL-STD810G 5-500 Hz, 3 axes -

    Link to company website is here.

    "},{"location":"reference-hw/ad-computers/#crystal-rugged-in-vehicle-computers","title":"Crystal Rugged In-Vehicle Computers","text":"

    Crystal Rugged solutions which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Products List CPU GPU RAM, Interfaces Environmental Autoware Tested (Y/N) AVC 0161-AC Intel\u00ae Xeon\u00ae Scalable Dual GPU RTX Series 2TB RAM,CAN, USB, Ethernet, Serial, Hot-Swap SSD 10-32 VoltVibration:2 G RMS 10-1000 Hz, 3 axes -

    Link to company website is here.

    "},{"location":"reference-hw/cameras/","title":"CAMERAs","text":""},{"location":"reference-hw/cameras/#cameras","title":"CAMERAs","text":""},{"location":"reference-hw/cameras/#tier-iv-automotive-hdr-cameras","title":"TIER IV Automotive HDR Cameras","text":"

    TIER IV's Automotive HDR cameras which have a ROS 2 driver and have been tested by TIER IV are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) C1 2.5 30 GMSL2 / USB3 Y (120dB) Y Y IP69K Y Y C2 5.4 30 GMSL2 / USB3 Y (120dB) Y Y IP69K Y Y C3 (to be released in 2024) 8.3 30 GMSL2 / TBD Y (120dB) Y Y IP69K Y Y

    Link to ROS 2 driver: https://github.com/tier4/ros2_v4l2_camera

    Link to product support site: TIER IV Edge.Auto documentation

    Link to product web site: TIER IV Automotive Camera Solution

    "},{"location":"reference-hw/cameras/#flir-machine-vision-cameras","title":"FLIR Machine Vision Cameras","text":"

    FLIR Machine Vision cameras which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) Blackfly S 2.0 5.0 22 95 USB-GigE N/A N/A Y N/A Y - Grasshopper3 2.3 5.0 26 90 USB-GigE N/A N/A Y N/A Y -

    Link to ROS 2 driver: https://github.com/berndpfrommer/flir_spinnaker_ros2

    Link to company website: https://www.flir.eu/iis/machine-vision/

    "},{"location":"reference-hw/cameras/#lucid-vision-cameras","title":"Lucid Vision Cameras","text":"

    Lucid Vision cameras which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) TRITON 054S 5.4 22 GigE Y Y Y up to IP67 Y Y TRITON 032S 3.2 35.4 GigE N/A N/A Y up to IP67 Y Y

    Link to ROS 2 driver: https://gitlab.com/leo-drive/Drivers/arena_camera Link to company website: https://thinklucid.com/triton-gige-machine-vision/

    "},{"location":"reference-hw/cameras/#allied-vision-cameras","title":"Allied Vision Cameras","text":"

    Allied Vision cameras which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List MP FPS Interface HDR LFM Trigger /Synchronization Ingress Protection ROS 2 Driver Autoware Tested (Y/N) Mako G319 3.2 37.6 GigE N/A N/A Y N/A Y -

    Link to ROS 2 driver: https://github.com/neil-rti/avt_vimba_camera

    Link to company website: https://www.alliedvision.com/en/products/camera-series/mako-g

    "},{"location":"reference-hw/full_drivers_list/","title":"Drivers List","text":""},{"location":"reference-hw/full_drivers_list/#drivers-list","title":"Drivers List","text":"

    The list of all drivers listed above for easy access as a table with additional information:

    Type Maker Driver links License Maintainer Lidar VelodyneHesai Link Apache 2 david.wong@tier4.jpabraham.monrroy@map4.jp Lidar Velodyne Link BSD jwhitley@autonomoustuff.com Lidar Robosense Link BSD zdxiao@robosense.cn Lidar Hesai Link Apache 2 wuxiaozhou@hesaitech.com Lidar Leishen Link - - Lidar Livox Link MIT dev@livoxtech.com Lidar Ouster Link Apache 2 stevenmacenski@gmail.comtom@boxrobotics.ai Radar smartmicro Link Apache 2 opensource@smartmicro.de Camera Flir Link Apache 2 bernd.pfrommer@gmail.com Camera Lucid Vision Link - kcolak@leodrive.ai Camera Allied Vision Link Apache 2 at@email.com GNSS NovAtel Link BSD preed@swri.org GNSS SBG Systems Link MIT support@sbg-systems.com GNSS PolyExplore Link - support@polyexplore.com"},{"location":"reference-hw/imu_ahrs_gnss_ins/","title":"IMU, AHRS & GNSS/INS","text":""},{"location":"reference-hw/imu_ahrs_gnss_ins/#imu-ahrs-gnssins","title":"IMU, AHRS & GNSS/INS","text":""},{"location":"reference-hw/imu_ahrs_gnss_ins/#novatel-gnssins-sensors","title":"NovAtel GNSS/INS Sensors","text":"

    NovAtel GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) PwrPak7D-E2 200 Hz R (0.013\u00b0)P (0.013\u00b0)Y (0.070\u00b0) 20 HzL1 / L2 / L5 555 Channels Y - Span CPT7 200 Hz R (0.01\u00b0)\u00a0P (0.01\u00b0)\u00a0Y (0.03\u00b0) 20 Hz L1 / L2 / L5 555 Channels Y -

    Link to ROS 2 driver: https://github.com/swri-robotics/novatel_gps_driver/tree/dashing-devel

    Link to company website: https://hexagonpositioning.com/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#xsens-gnssins-imu-sensors","title":"XSens GNSS/INS & IMU Sensors","text":"

    XSens GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) MTi-680G 2 kHz R (0.2\u00b0)P (0.2\u00b0)Y (0.5\u00b0) 5 HzL1 / L2\u00a0184 Channels Y - MTi-300 AHRS 2 kHz R (0.2\u00b0)P (0.2\u00b0)Y (1\u00b0) Not Applicable Y -

    Link to ROS 2 driver: http://wiki.ros.org/xsens_mti_driver

    Link to company website: https://www.xsens.com/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#sbg-gnssins-imu-sensors","title":"SBG GNSS/INS & IMU Sensors","text":"

    SBG GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) Ellipse-D 200 Hz, 1 kHz (IMU) R (0.1\u00b0)P (0.1\u00b0)Y (0.05\u00b0) 5 HzL1 / L2184 Channels Y Y Ellipse-A (AHRS) 200 Hz, 1 kHz (IMU) R (0.1\u00b0)P (0.1\u00b0)Y (0.8\u00b0) Not Applicable Y -

    Link to ROS 2 driver: https://github.com/SBG-Systems/sbg_ros2

    Link to company website: https://www.sbg-systems.com/products/ellipse-series/

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#applanix-gnssins-sensors","title":"Applanix GNSS/INS Sensors","text":"

    Applanix GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) POSLVX 200 Hz R (0.03\u00b0)P (0.03\u00b0)Y (0.09\u00b0) L1 / L2 / L5336 Channels Y Y POSLV220 200 Hz R (0.02\u00b0)P (0.02\u00b0)Y (0.05\u00b0) L1 / L2 / L5336 Channels Y Y

    Link to ROS 2 driver: http://wiki.ros.org/applanix_driver

    Link to company website: https://www.applanix.com/products/poslv.htm

    "},{"location":"reference-hw/imu_ahrs_gnss_ins/#polyexplore-gnssins-sensors","title":"PolyExplore GNSS/INS Sensors","text":"

    PolyExplore GNSS/INS sensors which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List INS/IMU Rate Roll, Pitch, Yaw Acc. GNSS ROS 2 Driver\u00a0 Autoware Tested (Y/N) POLYNAV 2000P 100 Hz R (0.01\u00b0)P (0.01\u00b0)Y (0.1\u00b0) L1 / L2240 Channels Y - POLYNAV 2000S 100 Hz R (0.015\u00b0)P (0.015\u00b0)Y (0.08\u00b0) L1 / L240 Channels Y -

    Link to ROS 2 driver: https://github.com/polyexplore/ROS2_Driver

    Link to company website: https://www.polyexplore.com/

    "},{"location":"reference-hw/lidars/","title":"LIDARs","text":""},{"location":"reference-hw/lidars/#lidars","title":"LIDARs","text":""},{"location":"reference-hw/lidars/#velodyne-3d-lidar-sensors","title":"Velodyne 3D LIDAR Sensors","text":"

    Velodyne Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Alpha Prime 245m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Ultra Puck 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Puck 100m (+15\u00b0)/(-15\u00b0), (360\u00b0) Y Y Puck Hi-res 100m (+10\u00b0)/(-10\u00b0), (360\u00b0) Y Y

    Link to ROS 2 drivers: https://github.com/tier4/nebula https://github.com/ros-drivers/velodyne/tree/ros2/velodyne_pointcloud https://gitlab.com/autowarefoundation/autoware.auto/AutowareAuto/-/tree/master/src/drivers/velodyne_nodes https://github.com/autowarefoundation/awf_velodyne/tree/tier4/universe

    Link to company website: https://velodynelidar.com/

    "},{"location":"reference-hw/lidars/#robosense-3d-lidar-sensors","title":"RoboSense 3D LIDAR Sensors","text":"

    RoboSense Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) RS-Ruby 250m (+15\u00b0)/(-25\u00b0), (360) Y - RS-Ruby-Lite 230m (+15\u00b0)/(-25\u00b0), (360) Y - RS-LiDAR-32 200m (+15\u00b0)/(-25\u00b0), (360) Y - RS-LiDAR-16 150m (+15\u00b0)/(-15), (360) Y -

    Link to ROS 2 driver: https://github.com/RoboSense-LiDAR/rslidar_sdk

    Link to company website: https://www.robosense.ai/

    "},{"location":"reference-hw/lidars/#hesai-3d-lidar-sensors","title":"HESAI 3D LIDAR Sensors","text":"

    Hesai Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Pandar 128 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y - Pandar 64 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Pandar 40P 200m (+15\u00b0)/(-25\u00b0), (360\u00b0) Y Y Pandar XT 120m (+15\u00b0)/(-16\u00b0), (360\u00b0) Y Y Pandar QT 20m (-52.1\u00b0/+52.1\u00b0)/(360\u00b0) Y Y

    Link to ROS 2 drivers: https://github.com/tier4/nebula https://github.com/HesaiTechnology/HesaiLidar_General_ROS

    Link to company website: https://www.hesaitech.com/en/

    "},{"location":"reference-hw/lidars/#leishen-3d-lidar-sensors","title":"Leishen 3D LIDAR Sensors","text":"

    Leishen Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) LS C16 150m (+15\u00b0/-15\u00b0), (360\u00b0) Y - LS C32\u00a0 150m (+15\u00b0/-15\u00b0), (360\u00b0) Y - CH 32 120m (+3.7\u00b0/-6.7\u00b0),(120\u00b0) Y - CH 128 20m (+14\u00b0/-17\u00b0)/(150\u00b0) Y -

    Link to ROS 2 driver: https://github.com/leishen-lidar

    Link to company website: http://www.lslidar.com/

    "},{"location":"reference-hw/lidars/#livox-3d-lidar-sensors","title":"Livox 3D LIDAR Sensors","text":"

    Livox Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) Horizon 260m (81.7\u00b0), (25.1\u00b0) Y Y Mid-70 90m (70.4\u00b0), (77.2\u00b0) Y - Avia 190m (70.4\u00b0), Circular Y - HAP 150m (25\u00b0), (120\u00b0) - -

    Link to ROS 2 driver: https://github.com/Livox-SDK/livox_ros2_driver

    Link to company website: https://www.livoxtech.com/

    "},{"location":"reference-hw/lidars/#ouster-3d-lidar-sensors","title":"Ouster 3D LIDAR Sensors","text":"

    Ouster Lidars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (V), (H) ROS 2 Driver Autoware Tested (Y/N) OS0 50m (90\u00b0), (360\u00b0) Y - OS1 120m (45\u00b0), (360\u00b0) Y - OS2 240m (22,5\u00b0), (360\u00b0) Y Y

    Link to ROS 2 driver: https://github.com/ros-drivers/ros2_ouster_drivers

    Link to company website: https://ouster.com/

    "},{"location":"reference-hw/radars/","title":"RADARs","text":""},{"location":"reference-hw/radars/#radars","title":"RADARs","text":""},{"location":"reference-hw/radars/#smartmicro-automotive-radars","title":"Smartmicro Automotive Radars","text":"

    Smartmicro Radars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) Type 153 (Triple Mode Short, Medium Long) S:0.2...19 m\u00a0M:0.4...55 m L:0.8...120 m Short: (130\u00b0), (15\u00b0) Medium: (130\u00b0), (15\u00b0)Long: (100\u00b0),(15\u00b0) Y Y Type 132 ,(Dual Mode ,Medium, Long) M:0.5...64 m\u00a0\u00a0L:1...175 m Medium: (100\u00b0), (15\u00b0) Long: (32\u00b0), (15\u00b0) Y Y

    Link to ROS 2 driver: https://github.com/smartmicro/smartmicro_ros2_radars

    Link to company website: https://www.smartmicro.com/automotive-radar

    "},{"location":"reference-hw/radars/#aptiv-automotive-radars","title":"Aptiv Automotive Radars","text":"

    Aptiv Radars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) Aptiv MMR (Dual Mode Short, Long) S: 1...40 m L: 3...160 m Short.: (90), (90\u00b0) Long: (90\u00b0), (90\u00b0) Y - Aptiv ESR 2.5 (Dual Mode (Medium, Long)) M: 1...60 m L: 1...175 m Med.: (90\u00b0), (4.4\u00b0) Long: (20\u00b0), (4.4\u00b0) Y -

    Link to company website: https://autonomoustuff.com/products

    "},{"location":"reference-hw/radars/#continental-engineering-radars","title":"Continental Engineering Radars","text":"

    Continental Engineering Radars which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List Range FOV (Azimuth), (Elevation) ROS 2 Driver Autoware Tested (Y/N) ARS430DI 250m (120), (18\u00b0) - -

    Link to company website: https://conti-engineering.com/components/ars430/

    "},{"location":"reference-hw/remote_drive/","title":"Remote Drive","text":""},{"location":"reference-hw/remote_drive/#remote-drive","title":"Remote Drive","text":""},{"location":"reference-hw/remote_drive/#fort-robotics","title":"FORT ROBOTICS","text":"

    Fort Robotics remote control & E-stop devices which are used for autonomous driving and tested by one or more community members are listed below:

    Supported Products Op.Frequency Controller ROS 2 Support Autoware Tested (Y/N) Vehicle Safety Controller with E-stop 900 Mhz radio: up to 2km LOS2.4Ghz radio: up to 500m LOS IP 66 EnclosureBuilt-in emergency stop safety control(2) 2-axis joysticks(2) 1-axis finger sticks(8) buttons - -

    Link to company website: https://fortrobotics.com/vehicle-safety-controller/

    "},{"location":"reference-hw/remote_drive/#logitech","title":"LOGITECH","text":"

    Logitech joysticks which are used for autonomous driving and tested by one or more community members are listed below:

    Supported Products Op.Frequency Controller ROS 2 Support Autoware Tested (Y/N) Logitech F-710 2.4 GHz Wireless, 10m range (2) 2-axis joysticks (18) buttons Y Y

    Link to ROS driver: http://wiki.ros.org/joy

    Link to company website: https://www.logitechg.com/en-us/products/gamepads/f710-wireless-gamepad.html

    "},{"location":"reference-hw/thermal_cameras/","title":"Thermal CAMERAs","text":""},{"location":"reference-hw/thermal_cameras/#thermal-cameras","title":"Thermal CAMERAs","text":""},{"location":"reference-hw/thermal_cameras/#flir-thermal-automotive-dev-kit","title":"FLIR Thermal Automotive Dev. Kit","text":"

    FLIR ADK Thermal Vision cameras which have a ROS 2 driver and have been tested by one or more community members are listed below:

    Supported Products List MP FPS Interface Spectral Band FOV ROS 2 Driver Autoware Tested (Y/N) FLIR ADK 640x512 30 USB-GMSL,Ethernet 8-14 um (LWIR) 75\u02da, 50\u02da, 32\u02da, and 24\u02da - -"},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/","title":"Vehicle Drive By Wire Suppliers","text":""},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#vehicle-drive-by-wire-suppliers","title":"Vehicle Drive By Wire Suppliers","text":""},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#new-eagle-dbw-solutions","title":"New Eagle DBW Solutions","text":"

    New Eagle DBW Controllers which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Jeep CherokeeChrysler PacificaToyota PriusChevy BoltFord TransitRAM 1500Custom\u00a0 500W Sine Inverter2000 Watts8 Channel PDS Optional, Available Y Y

    Link to company website: https://neweagle.net/autonomous-machines/

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#dataspeed-dbw-solutions","title":"Dataspeed DBW Solutions","text":"

    Dataspeed DBW Controllers which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Lincoln MKZ, NautilusFord Fusion, F150, Transit Connect, RangerChrysler PacificaJeep CherokeePolaris GEM, RZR 12 Channel PDS,15 A Each at 12 V Optional, Available Y -

    Link to company website: https://www.dataspeedinc.com/

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#astuff-pacmod-dbw-solutions","title":"AStuff Pacmod DBW Solutions","text":"

    Autonomous Stuff Pacmod DBW Controllers which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Polaris GEM SeriesPolaris eLXD MY 2016+Polaris Ranger X900International ProStarLexus RX-450h MYFord RangerToyota Minivan Power distribution panel Optional, Available Y Y

    Link to company website: https://autonomoustuff.com/platform/pacmod

    "},{"location":"reference-hw/vehicle_drive_by_wire_suppliers/#schaeffler-paravan-space-drive-dbw-solutions","title":"Schaeffler-Paravan Space Drive DBW Solutions","text":"

    Schaeffler-Paravan Space Drive DBW Controllers which are used for autonomous driving and have been tested by one or more community members are listed below:

    Supported Vehicles Power Remote Control ROS 2 Support Autoware Tested (Y/N) Custom Integration with Actuators - Optional, Available Y Y

    Link to company website: https://www.schaeffler-paravan.de/en/products/space-drive-system/

    "},{"location":"reference-hw/vehicle_platform_suppliers/","title":"Vehicle Platform Suppliers","text":""},{"location":"reference-hw/vehicle_platform_suppliers/#vehicle-platform-suppliers","title":"Vehicle Platform Suppliers","text":""},{"location":"reference-hw/vehicle_platform_suppliers/#pix-moving-autonomous-vehicle-solutions","title":"PIX MOVING Autonomous Vehicle Solutions","text":"

    PIX Moving AV solutions which are used for autonomous development and have been tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Electric DBW Chassis and Platforms Y Y Y -

    Link to company website: https://www.pixmoving.com/pixkit

    Different sizes of platforms

    "},{"location":"reference-hw/vehicle_platform_suppliers/#autonomoustuff-av-solutions","title":"Autonomoustuff AV Solutions","text":"

    Autonomoustuff platform solutions which are used for autonomous development and have been tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Road Vehicles, Golf Carts & Trucks Y Y Y -

    Link to company website: https://autonomoustuff.com/platform

    "},{"location":"reference-hw/vehicle_platform_suppliers/#navya-av-solutions","title":"NAVYA AV Solutions","text":"

    NAVYA platform solutions which are used for autonomous development and have been tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Shuttle Bus, Taxi and Tow Tractors Y Y - -

    Link to company website: https://navya.tech/en

    "},{"location":"reference-hw/vehicle_platform_suppliers/#zing-robotics-av-solutions","title":"ZING ROBOTICS AV Solutions","text":"

    ZING Robotics platform solutions which are used for autonomous development and have been tested by one or more community members are listed below:

    Vehicle Types Sensors Integrated Autoware Installed ROS 2 Support Autoware Tested (Y/N) Purpose built electric autonomous vehicles for aviation, military etc. Y Y - -

    Link to company website: https://www.zingrobotics.com/

    "},{"location":"support/","title":"Support","text":""},{"location":"support/#support","title":"Support","text":"

    This page explains several support resources.

    "},{"location":"support/docs-guide/","title":"Docs guide","text":""},{"location":"support/docs-guide/#docs-guide","title":"Docs guide","text":"

    This page explains several documentation sites that are useful for Autoware and ROS development.

    "},{"location":"support/support-guidelines/","title":"Support guidelines","text":""},{"location":"support/support-guidelines/#support-guidelines","title":"Support guidelines","text":"

    This page explains the support mechanisms we provide.

    Warning

    Before asking for help, search and read this documentation site carefully. Also, follow the discussion guidelines for discussions.

    Choose appropriate resources depending on what kind of help you need and read the detailed description in the sections below.

    "},{"location":"support/support-guidelines/#documentation-sites","title":"Documentation sites","text":"

    Docs guide shows the list of useful documentation sites. Visit them and see if there is any information related to your problem.

    Note that the documentation sites aren't always up-to-date and perfect. If you find out that some information is wrong, unclear, or missing in Autoware docs, feel free to submit a pull request following the contribution guidelines.

    Warning

    Since this documentation site is still under construction, there are some empty pages.

    "},{"location":"support/support-guidelines/#github-discussions","title":"GitHub Discussions","text":"

    If you encounter a problem with Autoware, check existing issues and questions and search for similar issues first.

    If no answer was found, create a new question thread here. If your question is not answered within a week, then @mention the maintainers to remind them.

    Also, there are other discussion types such as feature requests or design discussions. Feel free to open or join such discussions.

    If you don't know how to create a discussion, refer to GitHub Docs.

    "},{"location":"support/support-guidelines/#github-issues","title":"GitHub Issues","text":"

    If you have a problem and you have confirmed it is a bug, find the appropriate repository and create a new issue there. If you can't determine the appropriate repository, ask the maintainers for help by creating a new discussion in the Q&A category.

    Warning

    Do not create issues for questions or unconfirmed bugs. If such issues are created, maintainers will transfer them to GitHub Discussions.

    If you want to fix the bug by yourself, discuss the approach with maintainers and submit a pull request.

    "},{"location":"support/support-guidelines/#discord","title":"Discord","text":"

    Autoware has a Discord server for casual communication between contributors.

    The Autoware Discord server is a good place for the following activities:

    Note that it is not the right place to get help for your issues.

    "},{"location":"support/support-guidelines/#ros-discourse","title":"ROS Discourse","text":"

    If you want to widely discuss a topic with the general Autoware and ROS community or ask a question not related to Autoware's bugs, post to the Autoware category on ROS Discourse.

    Warning

    Do not post questions about bugs to ROS Discourse!

    "},{"location":"support/troubleshooting/","title":"Troubleshooting","text":""},{"location":"support/troubleshooting/#troubleshooting","title":"Troubleshooting","text":""},{"location":"support/troubleshooting/#setup-issues","title":"Setup issues","text":""},{"location":"support/troubleshooting/#cuda-related-errors","title":"CUDA-related errors","text":"

    When installing CUDA, errors may occur because of version conflicts. To resolve these types of errors, try one of the following methods:

    Warning

    Note that this may break your system, so proceed carefully.

    Warning

    Note that some components in Autoware Universe require CUDA, and only the CUDA version in the env file is supported at this time. Autoware may work with other CUDA versions, but those versions are not supported and functionality is not guaranteed.

    "},{"location":"support/troubleshooting/#build-issues","title":"Build issues","text":""},{"location":"support/troubleshooting/#insufficient-memory","title":"Insufficient memory","text":"

    Building Autoware requires a lot of memory, and your machine can freeze or crash if memory runs out during a build. To avoid this problem, 16-32GB of swap should be configured.

    # Optional: Check the current swapfile\nfree -h\n\n# Remove the current swapfile\nsudo swapoff /swapfile\nsudo rm /swapfile\n\n# Create a new swapfile\nsudo fallocate -l 32G /swapfile\nsudo chmod 600 /swapfile\nsudo mkswap /swapfile\nsudo swapon /swapfile\n\n# Optional: Check if the change is reflected\nfree -h\n

    For more detailed configuration steps, along with an explanation of swap, refer to Digital Ocean's \"How To Add Swap Space on Ubuntu 20.04\" tutorial

    If your machine has many CPU cores (more than 64), the build might require more memory. A workaround is to limit the number of parallel jobs while building.

    MAKEFLAGS=\"-j4\" colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

    You can adjust -j4 to any number based on your system. For more details, see the manual page of GNU make.

    By reducing the number of packages built in parallel, you can also reduce the amount of memory used. In the following example, the number of packages built in parallel is set to 1, and the number of jobs used by make is limited to 1.

    MAKEFLAGS=\"-j1\" colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release --parallel-workers 1\n

    Note

    By lowering both the number of packages built in parallel and the number of jobs used by make, you can reduce the memory usage. However, this also means that the build process takes longer.

    "},{"location":"support/troubleshooting/#errors-when-using-the-latest-version-of-autoware","title":"Errors when using the latest version of Autoware","text":"

    If you are working with the latest version of Autoware, issues can occur due to out-of-date software or old build files.

    To resolve these types of problems, first try cleaning your build artifacts and rebuilding:

    rm -rf build/ install/ log/\ncolcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n

    If the error is not resolved, remove src/ and update your workspace according to installation type (Docker / source).

    Warning

    Before removing src/, confirm that there are no modifications in your local environment that you want to keep!

    If errors still persist after trying the steps above, delete the entire workspace, clone the repository once again and restart the installation process.

    rm -rf autoware/\ngit clone https://github.com/autowarefoundation/autoware.git\n
    "},{"location":"support/troubleshooting/#errors-when-using-a-fixed-version-of-autoware","title":"Errors when using a fixed version of Autoware","text":"

    In principle, errors should not occur when using a fixed version. That said, possible causes include:

    In addition to the causes listed above, there are two common misunderstandings around the use of fixed versions.

    1. You used a fixed version for autowarefoundation/autoware only. All of the repository versions in the .repos file must be specified in order to use a completely fixed version.

    2. You didn't update the workspace after changing the branch of autowarefoundation/autoware. Changing the branch of autowarefoundation/autoware does not affect the files under src/. You have to run the vcs import command to update them.

    "},{"location":"support/troubleshooting/#error-when-building-python-package","title":"Error when building python package","text":"

    During the build, the following issue can occur:

    pkg_resources.extern.packaging.version.InvalidVersion: Invalid version: '0.23ubuntu1'\n

    This error occurs because, for versions between 66.0.0 and 67.5.0, setuptools enforces that Python packages be PEP-440 conformant. Since version 67.5.1, setuptools has a fallback that makes it possible to work with old packages again.

    The solution is to update setuptools to the newest version with the following command

    pip install --upgrade setuptools\n
    "},{"location":"support/troubleshooting/#dockerrocker-issues","title":"Docker/rocker issues","text":"

    If any errors occur when running Autoware with Docker or rocker, first confirm that your Docker installation is working correctly by running the following commands:

    docker run --rm -it hello-world\ndocker run --rm -it ubuntu:latest\n

    Next, confirm that you are able to access the base Autoware image that is stored on the GitHub Packages website

    docker run --rm -it ghcr.io/autowarefoundation/autoware-universe:latest\n
    "},{"location":"support/troubleshooting/#runtime-issues","title":"Runtime issues","text":""},{"location":"support/troubleshooting/#performance-related-issues","title":"Performance related issues","text":"

    Symptoms:

    If you have any of these symptoms, please see the Performance Troubleshooting page.

    "},{"location":"support/troubleshooting/#map-does-not-display-when-running-the-planning-simulator","title":"Map does not display when running the Planning Simulator","text":"

    When running the Planning Simulator, the most common reason for the map not being displayed in RViz is because the map path has not been specified correctly in the launch command. You can confirm if this is the case by searching for Could not find lanelet map under {path-to-map-dir}/lanelet2_map.osm errors in the log.

    Another possible reason is that map loading is taking a long time due to poor DDS performance. For this, please visit the Performance Troubleshooting page.

    "},{"location":"support/troubleshooting/performance-troubleshooting/","title":"Performance Troubleshooting","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#performance-troubleshooting","title":"Performance Troubleshooting","text":"

    Overall symptoms:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#diagnostic-steps","title":"Diagnostic Steps","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-multicast-is-enabled","title":"Check if multicast is enabled","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis","title":"Diagnosis","text":"

    Make sure that the multicast is enabled for your interface.

    For example when you run following:

    source /opt/ros/humble/setup.bash\nros2 run demo_nodes_cpp talker\n

    If you get the error message selected interface \"{your-interface-name}\" is not multicast-capable: disabling multicast, this should be fixed.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution","title":"Solution","text":"

    Run the following command to allow multicast:

    sudo ip link set multicast on {your-interface-name}\n

    This way DDS will function as intended and multiple subscribers can receive data from a single publisher without any significant degradation in performance.

    This is a temporary solution and will be reverted once the computer restarts.

    To make it permanent either,

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-compilation-flags","title":"Check the compilation flags","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_1","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_1","title":"Diagnosis","text":"

    Check the ~/.bash_history file to see if there are any colcon build directives without -DCMAKE_BUILD_TYPE=Release or -DCMAKE_BUILD_TYPE=RelWithDebInfo flags at all.

    Even if a build starts with these flags, if the same workspace later gets compiled without them, it will still be a slow build in the end.

    In addition, the nodes will run slow in general, especially the pointcloud_preprocessor nodes.

    Example issue: issue2597

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_1","title":"Solution","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-dds-settings","title":"Check the DDS settings","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_2","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-rmw-ros-middleware-implementation","title":"Check the RMW (ROS Middleware) implementation","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_2","title":"Diagnosis","text":"

    Run following to check the middleware used:

    echo $RMW_IMPLEMENTATION\n

    The return line should be rmw_cyclonedds_cpp. If not, apply the solution.

    If you are using a different DDS middleware, we might not have official support for it just yet.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_2","title":"Solution","text":"

    Add export RMW_IMPLEMENTATION=rmw_cyclonedds_cpp as a separate line in your ~/.bashrc file.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-the-cyclonedds-is-configured-correctly","title":"Check if the CycloneDDS is configured correctly","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_3","title":"Diagnosis","text":"

    Run following to check the configuration .xml file of the CycloneDDS:

    echo $CYCLONEDDS_URI\n

    The return line should be a valid path pointing to an .xml file with CycloneDDS configuration.

    Also check if the file is configured correctly:

    cat !{echo $CYCLONEDDS_URI}\n

    This should print the .xml file on the terminal.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_3","title":"Solution","text":"

    Follow DDS settings:Tuning DDS documentation and make sure:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-the-linux-kernel-maximum-buffer-size","title":"Check the Linux kernel maximum buffer size","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_4","title":"Diagnosis","text":"

    More info on these values: Cross-vendor tuning

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_4","title":"Solution","text":"

    Either:

    "},{"location":"support/troubleshooting/performance-troubleshooting/#check-if-ros-localhost-only-communication-is-enabled","title":"Check if ROS localhost only communication is enabled","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#target-symptoms_3","title":"Target symptoms","text":""},{"location":"support/troubleshooting/performance-troubleshooting/#diagnosis_5","title":"Diagnosis","text":"

    Run following to check it:

    echo $ROS_LOCALHOST_ONLY\n

    The return line should be 1. If not, apply the solution.

    "},{"location":"support/troubleshooting/performance-troubleshooting/#solution_5","title":"Solution","text":""},{"location":"tutorials/","title":"Simulation tutorials","text":""},{"location":"tutorials/#simulation-tutorials","title":"Simulation tutorials","text":"

    Simulations provide a way of verifying Autoware's functionality before field testing with an actual vehicle. There are three main types of simulation that can be run ad hoc or via a scenario runner.

    "},{"location":"tutorials/#simulation-methods","title":"Simulation methods","text":""},{"location":"tutorials/#ad-hoc-simulation","title":"Ad hoc simulation","text":"

    Ad hoc simulation is a flexible method for running basic simulations on your local machine, and is the recommended method for anyone new to Autoware.

    "},{"location":"tutorials/#scenario-simulation","title":"Scenario simulation","text":"

    Scenario simulation uses a scenario runner to run more complex simulations based on predefined scenarios. It is often run automatically for continuous integration purposes, but can also be run on a local machine.

    "},{"location":"tutorials/#simulation-types","title":"Simulation types","text":""},{"location":"tutorials/#planning-simulation","title":"Planning simulation","text":"

    Planning simulation uses simple dummy data to test the Planning and Control components - specifically path generation, path following and obstacle avoidance. It verifies that a vehicle can reach a goal destination while avoiding pedestrians and surrounding cars, and is another method for verifying the validity of Lanelet2 maps. It also allows for testing of traffic light handling.

    "},{"location":"tutorials/#how-does-planning-simulation-work","title":"How does planning simulation work?","text":"
    1. Generate a path to the goal destination
    2. Control the car along the generated path
    3. Detect and avoid any humans or other vehicles on the way to the goal destination
    "},{"location":"tutorials/#rosbag-replay-simulation","title":"Rosbag replay simulation","text":"

    Rosbag replay simulation uses prerecorded rosbag data to test the following aspects of the Localization and Perception components:

    By repeatedly playing back the data, this simulation type can also be used for endurance testing.

    "},{"location":"tutorials/#digital-twin-simulation","title":"Digital twin simulation","text":"

    Digital twin simulation is a simulation type that is able to produce realistic data and simulate almost the entire system. It is also commonly referred to as end-to-end simulation.

    "},{"location":"tutorials/ad-hoc-simulation/","title":"Ad hoc simulation","text":""},{"location":"tutorials/ad-hoc-simulation/#ad-hoc-simulation","title":"Ad hoc simulation","text":"

    Warning

    Under Construction

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/","title":"Planning simulation","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#planning-simulation","title":"Planning simulation","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#preparation","title":"Preparation","text":"

    Download and unpack a sample map.

    gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1499_nsbUbIeturZaDj7jhUownh5fvXHd'\nunzip -d ~/autoware_map ~/autoware_map/sample-map-planning.zip\n

    Note

    Sample map: Copyright 2020 TIER IV, Inc.

    Check if you have ~/autoware_data folder and files in it.

    $ cd ~/autoware_data\n$ ls -C -w 30\nimage_projection_based_fusion\nlidar_apollo_instance_segmentation\nlidar_centerpoint\ntensorrt_yolo\ntensorrt_yolox\ntraffic_light_classifier\ntraffic_light_fine_detector\ntraffic_light_ssd_fine_detector\nyabloc_pose_initializer\n

    If not, please, follow Manual downloading of artifacts.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#basic-simulations","title":"Basic simulations","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#lane-driving-scenario","title":"Lane driving scenario","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#1-launch-autoware","title":"1. Launch Autoware","text":"
    source ~/autoware/install/setup.bash\nros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-planning vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

    Warning

    Note that you cannot use ~ instead of $HOME here.

    If ~ is used, the map will fail to load.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#2-set-an-initial-pose-for-the-ego-vehicle","title":"2. Set an initial pose for the ego vehicle","text":"

    a) Click the 2D Pose estimate button in the toolbar, or hit the P key.

    b) In the 3D View pane, click and hold the left-mouse button, and then drag to set the direction for the initial pose. An image representing the vehicle should now be displayed.

    Warning

    Remember to set the initial pose of the car in the same direction as the lane.

    To confirm the direction of the lane, check the arrowheads displayed on the map.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#3-set-a-goal-pose-for-the-ego-vehicle","title":"3. Set a goal pose for the ego vehicle","text":"

    a) Click the 2D Goal Pose button in the toolbar, or hit the G key.

    b) In the 3D View pane, click and hold the left-mouse button, and then drag to set the direction for the goal pose. If done correctly, you will see a planned path from initial pose to goal pose.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#4-start-the-ego-vehicle","title":"4. Start the ego vehicle","text":"

    Now you can start the ego vehicle driving by clicking the AUTO button on OperationMode in AutowareStatePanel. Alternatively, you can manually start the vehicle by running the following command:

    source ~/autoware/install/setup.bash\nros2 service call /api/operation_mode/change_to_autonomous autoware_adapi_v1_msgs/srv/ChangeOperationMode {}\n

    After that, you can see AUTONOMOUS sign on OperationMode and AUTO button is grayed out.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#parking-scenario","title":"Parking scenario","text":"
    1. Set an initial pose and a goal pose, and engage the ego vehicle.

    2. When the vehicle approaches the goal, it will switch from lane driving mode to parking mode.

    3. After that, the vehicle will reverse into the destination parking spot.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#lane-change-scenario","title":"Lane change scenario","text":"
    1. Download and unpack Nishishinjuku map.

      gdown -O ~/autoware_map/ 'https://github.com/tier4/AWSIM/releases/download/v1.1.0/nishishinjuku_autoware_map.zip'\nunzip -d ~/autoware_map ~/autoware_map/nishishinjuku_autoware_map.zip\n
    2. Launch autoware with Nishishinjuku map with following command:

      source ~/autoware/install/setup.bash\nros2 launch autoware_launch planning_simulator.launch.xml map_path:=$HOME/autoware_map/nishishinjuku_autoware_map vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

    3. Set an initial pose and a goal pose in adjacent lanes.

    4. Engage the ego vehicle. It will make a lane change along the planned path.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#avoidance-scenario","title":"Avoidance scenario","text":"
    1. Set an initial pose and a goal pose in the same lane. A path will be planned.

    2. Set a \"2D Dummy Bus\" on the roadside. A new path will be planned.

    3. Engage the ego vehicle. It will avoid the obstacle along the newly planned path.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#advanced-simulations","title":"Advanced Simulations","text":""},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#placing-dummy-objects","title":"Placing dummy objects","text":"
    1. Click the 2D Dummy Car or 2D Dummy Pedestrian button in the toolbar.
    2. Set the pose of the dummy object by clicking and dragging on the map.
    3. Set the velocity of the object in Tool Properties -> 2D Dummy Car/Pedestrian panel.

      !!! note

      Changes to the velocity parameter will only affect objects placed after the parameter is changed.

    4. Delete any dummy objects placed in the view by clicking the Delete All Objects button in the toolbar.

    5. Click the Interactive button in the toolbar to make the dummy object interactive.

    6. To add an interactive dummy object, press SHIFT and right-click.

    7. To delete an interactive dummy object, press ALT and right-click.
    8. To move an interactive dummy object, hold the right mouse button and drag and drop the object.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#traffic-light-recognition-simulation","title":"Traffic light recognition simulation","text":"

    By default, traffic lights on the map are all treated as if they are set to green. As a result, when a path is created that passes through an intersection with a traffic light, the ego vehicle will drive through the intersection without stopping.

    The following steps explain how to set and reset traffic lights in order to test how the Planning component will respond.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#set-traffic-light","title":"Set traffic light","text":"
    1. Go to Panels -> Add new panel, select TrafficLightPublishPanel, and then press OK.

    2. In TrafficLightPublishPanel, set the ID and color of the traffic light.

    3. Click the SET button.

    4. Finally, click the PUBLISH button to send the traffic light status to the simulator. Any planned path that goes past the selected traffic light will then change accordingly.

    By default, Rviz should display the ID of each traffic light on the map. You can have a closer look at the IDs by zooming in the region or by changing the View type.

    In case the IDs are not displayed, try the following troubleshooting steps:

    a) In the Displays panel, find the traffic_light_id topic by toggling the triangle icons next to Map > Lanelet2VectorMap > Namespaces.

    b) Check the traffic_light_id checkbox.

    c) Reload the topic by clicking the Map checkbox twice.

    "},{"location":"tutorials/ad-hoc-simulation/planning-simulation/#updatereset-traffic-light","title":"Update/Reset traffic light","text":"

    You can update the color of the traffic light by selecting the next color (in the image it is GREEN) and clicking SET button. In the image the traffic light in front of the ego vehicle changed from RED to GREEN and the vehicle restarted.

    To remove a traffic light from TrafficLightPublishPanel, click the RESET button.

    Reference video tutorials

    "},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/","title":"Rosbag replay simulation","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#rosbag-replay-simulation","title":"Rosbag replay simulation","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#steps","title":"Steps","text":"
    1. Download and unpack a sample map.

      • You can also download the map manually.
      gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1A-8BvYRX3DhSzkAnOcGWFw5T30xTlwZI'\nunzip -d ~/autoware_map/ ~/autoware_map/sample-map-rosbag.zip\n
    2. Download the sample rosbag files.

      • You can also download the rosbag files manually.
      gdown -O ~/autoware_map/ 'https://docs.google.com/uc?export=download&id=1VnwJx9tI3kI_cTLzP61ktuAJ1ChgygpG'\nunzip -d ~/autoware_map/ ~/autoware_map/sample-rosbag.zip\n
    3. Check if you have ~/autoware_data folder and files in it.

      $ cd ~/autoware_data\n$ ls -C -w 30\nimage_projection_based_fusion\nlidar_apollo_instance_segmentation\nlidar_centerpoint\ntensorrt_yolo\ntensorrt_yolox\ntraffic_light_classifier\ntraffic_light_fine_detector\ntraffic_light_ssd_fine_detector\nyabloc_pose_initializer\n

      If not, please, follow Manual downloading of artifacts.

    "},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#note","title":"Note","text":""},{"location":"tutorials/ad-hoc-simulation/rosbag-replay-simulation/#how-to-run-a-rosbag-replay-simulation","title":"How to run a rosbag replay simulation","text":"
    1. Launch Autoware.

      source ~/autoware/install/setup.bash\nros2 launch autoware_launch logging_simulator.launch.xml map_path:=$HOME/autoware_map/sample-map-rosbag vehicle_model:=sample_vehicle sensor_model:=sample_sensor_kit\n

      Note that you cannot use ~ instead of $HOME here.

    2. Play the sample rosbag file.

      source ~/autoware/install/setup.bash\nros2 bag play ~/autoware_map/sample-rosbag/sample.db3 -r 0.2 -s sqlite3\n

    3. To focus the view on the ego vehicle, change the Target Frame in the RViz Views panel from viewer to base_link.

    4. To switch the view to Third Person Follower etc, change the Type in the RViz Views panel.

    Reference video tutorials

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/","title":"MORAI Sim: Drive","text":""},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#morai-sim-drive","title":"MORAI Sim: Drive","text":"

    Note

    Any kind of for-profit activity with the trial version of the MORAI SIM:Drive is strictly prohibited.

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#hardware-requirements","title":"Hardware requirements","text":"Minimum PC Specs OS Windows 10, Ubuntu 20.04, Ubuntu 18.04, Ubuntu 16.04 CPU Intel i5-9600KF or AMD Ryzen 5 3500X RAM DDR4 16GB GPU RTX2060 Super Required PC Specs OS Windows 10, Ubuntu 20.04, Ubuntu 18.04, Ubuntu 16.04 CPU Intel i9-9900K or AMD Ryzen 7 3700X (or higher) RAM DDR4 64GB (or higher) GPU RTX2080Ti or higher"},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#application-and-download","title":"Application and Download","text":"

    Only for AWF developers, trial license for 3 months can be issued. Download the application form and send to Hyeongseok Jeon

    After the trial license is issued, you can login to MORAI Sim:Drive via Launchers (Windows/Ubuntu)

    CAUTION: Do not use the Launchers in the following manual

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#technical-documents","title":"Technical Documents","text":"

    As of Oct. 2022, our simulation version is ver.22.R3, but the English manual is under construction.

    Be aware that the following manuals are for ver.22.R2

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/#technical-support","title":"Technical Support","text":"

    Hyeongseok Jeon will give full technical support

    "},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/","title":"AWSIM simulator","text":""},{"location":"tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/#awsim-simulator","title":"AWSIM simulator","text":"

    AWSIM is a simulator for Autoware development and testing. To get started, please follow the official instruction provided by TIER IV.

    "},{"location":"tutorials/scenario-simulation/","title":"Scenario simulation","text":""},{"location":"tutorials/scenario-simulation/#scenario-simulation","title":"Scenario simulation","text":"

    Warning

    Under Construction

    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/","title":"Installation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#installation","title":"Installation","text":"

    This document contains step-by-step instruction on how to build AWF Autoware Core/Universe with scenario_simulator_v2.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#prerequisites","title":"Prerequisites","text":"
    1. Autoware has been built and installed
    "},{"location":"tutorials/scenario-simulation/planning-simulation/installation/#how-to-build","title":"How to build","text":"
    1. Navigate to the Autoware workspace:

      cd autoware\n
    2. Import Simulator dependencies:

      vcs import src < simulator.repos\n
    3. Install dependent ROS packages:

      source /opt/ros/humble/setup.bash\nrosdep install -y --from-paths src --ignore-src --rosdistro $ROS_DISTRO\n
    4. Build the workspace:

      colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release\n
    "},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/","title":"Random test simulation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/#random-test-simulation","title":"Random test simulation","text":"

    Note

    Running the Scenario Simulator requires some additional steps on top of building and installing Autoware, so make sure that Scenario Simulator installation has been completed first before proceeding.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/random-test-simulation/#running-steps","title":"Running steps","text":"
    1. Move to the workspace directory where Autoware and the Scenario Simulator have been built.

    2. Source the workspace setup script:

      source install/setup.bash\n
    3. Run the simulation:

      ros2 launch random_test_runner random_test.launch.py \\\narchitecture_type:=awf/universe \\\nsensor_model:=sample_sensor_kit \\\nvehicle_model:=sample_vehicle\n

    For more information about supported parameters, refer to the random_test_runner documentation.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/","title":"Scenario test simulation","text":""},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/#scenario-test-simulation","title":"Scenario test simulation","text":"

    Note

    Running the Scenario Simulator requires some additional steps on top of building and installing Autoware, so make sure that Scenario Simulator installation has been completed first before proceeding.

    "},{"location":"tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/#running-steps","title":"Running steps","text":"
    1. Move to the workspace directory where Autoware and the Scenario Simulator have been built.

    2. Source the workspace setup script:

      source install/setup.bash\n
    3. Run the simulation:

      ros2 launch scenario_test_runner scenario_test_runner.launch.py \\\narchitecture_type:=awf/universe \\\nrecord:=false \\\nscenario:='$(find-pkg-share scenario_test_runner)/scenario/sample.yaml' \\\nsensor_model:=sample_sensor_kit \\\nvehicle_model:=sample_vehicle\n

    Reference video tutorials

    "},{"location":"tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/","title":"Driving Log Replayer","text":""},{"location":"tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/#driving-log-replayer","title":"Driving Log Replayer","text":"

    Driving Log Replayer is an evaluation tool for Autoware. To get started, follow the official instruction provided by TIER IV.

    "}]} \ No newline at end of file diff --git a/pr-465/sitemap.xml b/pr-465/sitemap.xml index fda2800e0a5..1dff3e5fd0f 100644 --- a/pr-465/sitemap.xml +++ b/pr-465/sitemap.xml @@ -860,6 +860,11 @@ 2023-10-10 daily + + https://autowarefoundation.github.io/autoware-documentation/pr-465/how-to-guides/integrating-autoware/creating-maps/creating-vector-map/traffic-light/ + 2023-10-10 + daily + https://autowarefoundation.github.io/autoware-documentation/pr-465/how-to-guides/integrating-autoware/creating-maps/open-source-slam/ 2023-10-10 diff --git a/pr-465/sitemap.xml.gz b/pr-465/sitemap.xml.gz index 3ee9c406143..9f5f28e0d5e 100644 Binary files a/pr-465/sitemap.xml.gz and b/pr-465/sitemap.xml.gz differ diff --git a/pr-465/support/docs-guide/index.html b/pr-465/support/docs-guide/index.html index 64c7998dc3b..6734180e7a0 100644 --- a/pr-465/support/docs-guide/index.html +++ b/pr-465/support/docs-guide/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/support/index.html b/pr-465/support/index.html index 3de1b81cd21..d5b5a4a33cc 100644 --- a/pr-465/support/index.html +++ b/pr-465/support/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/support/support-guidelines/index.html b/pr-465/support/support-guidelines/index.html index 78856b7e3b4..bb8fcbed32c 100644 --- a/pr-465/support/support-guidelines/index.html +++ b/pr-465/support/support-guidelines/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/support/troubleshooting/index.html b/pr-465/support/troubleshooting/index.html index b798aaee39a..1ee5f2ab115 100644 --- a/pr-465/support/troubleshooting/index.html +++ b/pr-465/support/troubleshooting/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/support/troubleshooting/performance-troubleshooting/index.html b/pr-465/support/troubleshooting/performance-troubleshooting/index.html index 7e18f392e33..f549f80fd02 100644 --- a/pr-465/support/troubleshooting/performance-troubleshooting/index.html +++ b/pr-465/support/troubleshooting/performance-troubleshooting/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/index.html b/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/index.html index 93f19636396..23e5aca26c3 100644 --- a/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/index.html +++ b/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/MORAI_Sim-tutorial/index.html @@ -2176,6 +2176,8 @@ + + @@ -2336,6 +2338,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/index.html b/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/index.html index 1be5b172fce..4fe64855616 100644 --- a/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/index.html +++ b/pr-465/tutorials/ad-hoc-simulation/digital-twin-simulation/awsim-tutorial/index.html @@ -2124,6 +2124,8 @@ + + @@ -2284,6 +2286,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/ad-hoc-simulation/index.html b/pr-465/tutorials/ad-hoc-simulation/index.html index 7f445aa965b..bb177c34840 100644 --- a/pr-465/tutorials/ad-hoc-simulation/index.html +++ b/pr-465/tutorials/ad-hoc-simulation/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/ad-hoc-simulation/planning-simulation/index.html b/pr-465/tutorials/ad-hoc-simulation/planning-simulation/index.html index 64a824b4ba1..130a0b31469 100644 --- a/pr-465/tutorials/ad-hoc-simulation/planning-simulation/index.html +++ b/pr-465/tutorials/ad-hoc-simulation/planning-simulation/index.html @@ -2223,6 +2223,8 @@ + + @@ -2383,6 +2385,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/ad-hoc-simulation/rosbag-replay-simulation/index.html b/pr-465/tutorials/ad-hoc-simulation/rosbag-replay-simulation/index.html index 6075c88d790..7402573d555 100644 --- a/pr-465/tutorials/ad-hoc-simulation/rosbag-replay-simulation/index.html +++ b/pr-465/tutorials/ad-hoc-simulation/rosbag-replay-simulation/index.html @@ -2175,6 +2175,8 @@ + + @@ -2335,6 +2337,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/index.html b/pr-465/tutorials/index.html index 695eb91f234..1b868fab790 100644 --- a/pr-465/tutorials/index.html +++ b/pr-465/tutorials/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/scenario-simulation/index.html b/pr-465/tutorials/scenario-simulation/index.html index a93fb92aa94..c692c9d9c99 100644 --- a/pr-465/tutorials/scenario-simulation/index.html +++ b/pr-465/tutorials/scenario-simulation/index.html @@ -2114,6 +2114,8 @@ + + @@ -2274,6 +2276,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/scenario-simulation/planning-simulation/installation/index.html b/pr-465/tutorials/scenario-simulation/planning-simulation/installation/index.html index ebdc6d557c9..2266d89a1fc 100644 --- a/pr-465/tutorials/scenario-simulation/planning-simulation/installation/index.html +++ b/pr-465/tutorials/scenario-simulation/planning-simulation/installation/index.html @@ -2162,6 +2162,8 @@ + + @@ -2322,6 +2324,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/scenario-simulation/planning-simulation/random-test-simulation/index.html b/pr-465/tutorials/scenario-simulation/planning-simulation/random-test-simulation/index.html index 4a466561df7..a12b503e175 100644 --- a/pr-465/tutorials/scenario-simulation/planning-simulation/random-test-simulation/index.html +++ b/pr-465/tutorials/scenario-simulation/planning-simulation/random-test-simulation/index.html @@ -2155,6 +2155,8 @@ + + @@ -2315,6 +2317,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/index.html b/pr-465/tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/index.html index d1d8c8dbaaf..9bd15818eb3 100644 --- a/pr-465/tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/index.html +++ b/pr-465/tutorials/scenario-simulation/planning-simulation/scenario-test-simulation/index.html @@ -2155,6 +2155,8 @@ + + @@ -2315,6 +2317,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + + diff --git a/pr-465/tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/index.html b/pr-465/tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/index.html index 60ca82d7f8b..bb8cf10720a 100644 --- a/pr-465/tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/index.html +++ b/pr-465/tutorials/scenario-simulation/rosbag-replay-simulation/driving-log-replayer/index.html @@ -2124,6 +2124,8 @@ + + @@ -2284,6 +2286,52 @@ + + + + + + +
  • + + + + + + + + + + + + + + + + + + + + + +
  • + + + +