diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f8098cd --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2017 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..3795f66 --- /dev/null +++ b/README.md @@ -0,0 +1,58 @@ +# Intel® RealSense™ SDK for Linux ROS Samples + +## Features +These samples illustrate how to develop OSRF® ROS* applications using the Intel® RealSense™ [ZR300](http://click.intel.com/intelr-realsensetm-development-kit-featuring-the-zr300.html) camera for Object Library (OR), Person Library (PT), and Simultaneous Localization And Mapping (SLAM). + +## Installation Instructions + +The Intel RealSense SDK for Linux is used as the base for these ROS node. Full installation information for the SDK is available at https://software.intel.com/sites/products/realsense/intro. 
Here is the quick setup guide: + +```bash +# Install ROS Kinetic full desktop environment +sudo apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116 +sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' +sudo apt-get update +sudo apt-get -y install ros-kinetic-desktop-full +sudo rosdep init +rosdep update +echo "source /opt/ros/kinetic/setup.bash" >> ~/.bashrc +source ~/.bashrc + +# Install Intel RealSense SDK for Linux +sudo apt-key adv --keyserver keys.gnupg.net --recv-key D6FB2970 +sudo sh -c 'echo "deb http://realsense-alm-public.s3.amazonaws.com/apt-repo xenial main" > /etc/apt/sources.list.d/realsense-latest.list' +sudo apt update +sudo apt install -y librealsense-object-recognition-dev librealsense-persontracking-dev librealsense-slam-dev libopencv-dev + +# Download and compile ROS wrappers for Intel RealSense SDK for Linux +mkdir -p catkin_ws/src +cd catkin_ws/src/ +catkin_init_workspace +git clone https://github.com/IntelRealSense/realsense_samples_ros +cd .. +catkin_make +source devel/setup.bash +``` + +## Usage Instructions +- [Camera](realsense_ros_camera/README.md): This ROS node implements use of ZR300 camera as a standard ROS camera node. +- [Object Recognition, Localization, and Tracking](realsense_ros_object/README.md): This ROS node demonstrates use of ZR300 camera above to implement Object Recognition, Localization, and Tracking functionality. +- [Person Tracking and Analysis](realsense_ros_person/README.md): This ROS node demonstrates use of the ZR300 camera above to implement Person Detection, Tracking, and Gesture analysis. +- [SLAM](realsense_ros_slam/README.md): This ROS node demonstrates use of ZR300 camera above for simultaneous location and mapping (SLAM), relocalization, and occupancy map generation. 
+ +## License +Copyright 2017 Intel Corporation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this project except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +**Other names and brands may be claimed as the property of others* diff --git a/realsense_ros_camera/CHANGELOG.rst b/realsense_ros_camera/CHANGELOG.rst new file mode 100644 index 0000000..aff28ae --- /dev/null +++ b/realsense_ros_camera/CHANGELOG.rst @@ -0,0 +1,13 @@ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Changelog for package realsense_ros_camera +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +0.8.2 (2017-04-04) +------------------ +* Bug fixes and updates +* Contributors: Ben Hirashima, Matt Curfman + +0.8.1 (2017-03-02) +------------------ +* Initial release +* Contributors: Ben Hirashima, Curfman, Matthew C, Lu, LuX, Matt Curfman, Stanislav Gutliansky diff --git a/realsense_ros_camera/CMakeLists.txt b/realsense_ros_camera/CMakeLists.txt new file mode 100644 index 0000000..ae7ffc2 --- /dev/null +++ b/realsense_ros_camera/CMakeLists.txt @@ -0,0 +1,86 @@ +cmake_minimum_required(VERSION 2.8.3) +project(realsense_ros_camera) + +find_package(catkin REQUIRED COMPONENTS + message_generation + roscpp + sensor_msgs + std_msgs + nodelet + cv_bridge + image_transport + tf +) + +find_package(OpenCV 3 REQUIRED) +#find_package(librealsense 1 REQUIRED) librealsense package currently lacks required pkgconfig file + +set(CMAKE_CXX_FLAGS "-fPIE -fPIC -std=c++11 -O2 -D_FORTIFY_SOURCE=2 -fstack-protector -Wformat -Wformat-security -Wall ${CMAKE_CXX_FLAGS}") + +add_message_files( + FILES + IMUInfo.msg 
+ Extrinsics.msg +) + +generate_messages( + DEPENDENCIES + sensor_msgs + std_msgs +) + +catkin_package( + LIBRARIES realsense_ros_camera + CATKIN_DEPENDS message_runtime roscpp sensor_msgs std_msgs + nodelet + cv_bridge + image_transport +) + +include_directories( + include + ${OpenCV_INCLUDE_DIRS} + ${catkin_INCLUDE_DIRS} +) +add_library(realsense_ros_camera + src/camera_node.cpp +) + +add_dependencies(${PROJECT_NAME} ${PROJECT_NAME}_generate_messages_cpp) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + +target_link_libraries(realsense_ros_camera + realsense + ${catkin_LIBRARIES} + ${OpenCV_LIBRARIES} + ${CMAKE_THREAD_LIBS_INIT} + ) + +if (CATKIN_ENABLE_TESTING) + find_package(rostest REQUIRED) + + add_executable(realsense_ros_tests_camera test/camera_core.cpp) + target_link_libraries(realsense_ros_tests_camera + ${catkin_LIBRARIES} + ${GTEST_LIBRARIES} + -lpthread -lm + ) + add_dependencies(realsense_ros_tests_camera ${PROJECT_NAME}_generate_messages_cpp) + add_dependencies(realsense_ros_tests_camera ${catkin_EXPORTED_TARGETS}) +endif() + + +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +) + +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch +) + +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + diff --git a/realsense_ros_camera/README.md b/realsense_ros_camera/README.md new file mode 100644 index 0000000..497c2f7 --- /dev/null +++ b/realsense_ros_camera/README.md @@ -0,0 +1,103 @@ +realsense_ros_camera +========================================= +This package provides a ROS node for using the Intel® RealSense™ R200, LR200, and ZR300 cameras. 
+ + +# Features +- Color RGB stream, up to 1920x1080 resolution +- Depth 16bit stream, up to 640x480 resolution +- Fisheye 8bit monochromatic stream, up to 640x480 resolution (ZR300 only) +- IMU accel and gyro data streams (ZR300 only) + +# Firmware +To ensure your camera has the most current, supported firmware, please review the librealsense compatible device information. If the camera requires a firmware upgrade, please refer to the Intel® RealSense™ Camera software support page. +*Note*: Currently there is no native Linux tool for FW updates; all updates require a system with Microsoft Windows. + +# Supported Camera Types +Intel RealSense R200 +Intel RealSense LR200 +Intel RealSense ZR300 + +# ROS API + +## realsense_ros_camera +The Intel RealSense camera driver nodelet + +### Published Topics + + /camera/color/camera_info (sensor_msgs/CameraInfo) + Color camera calibration and metadata. + /camera/color/image_raw (sensor_msgs/Image) + Color rectified image, RGB format. + /camera/depth/camera_info (sensor_msgs/CameraInfo) + Depth camera calibration and metadata. + /camera/depth/image_raw (sensor_msgs/Image) + Depth image, 16-bit format containing uint16 depths in mm. + /camera/fisheye/camera_info (sensor_msgs/CameraInfo) + (ZR300 only) Fisheye camera calibration and metadata. + /camera/fisheye/image_raw (sensor_msgs/Image) + (ZR300 only) Fisheye image, 8-bit grayscale. + /camera/accel/imu_info (realsense_ros_camera/IMUInfo) [Latched] + (ZR300 only) IMU accelerometer calibration and metadata. + /camera/accel/sample (sensor_msgs/Imu) + (ZR300 only) IMU accelerometer data + /camera/gyro/imu_info (realsense_ros_camera/IMUInfo) [Latched] + (ZR300 only) IMU angular calibration and metadata. 
+ /camera/gyro/sample (sensor_msgs/Imu) + (ZR300 only) IMU angular data + /camera/extrinsics/fisheye2depth (realsense_ros_camera/Extrinsics) [Latched] + (ZR300 only) Calibration extrinsics allowing projection from fisheye stream to depth data + /camera/extrinsics/fisheye2imu (realsense_ros_camera/Extrinsics) [Latched] + (ZR300 only) Calibration extrinsics allowing projection from fisheye stream to IMU data + +### Parameters + + ~serial_no (string, default: blank) + Specify the serial_no to uniquely connect to a camera, especially if multiple cameras are detected by the nodelet. You may get the serial_no from the info stream by launching the camera.launch file. + ~enable_depth (bool, default: true) + Specify whether to enable the depth camera. + ~depth_width (int, default: 480) + Specify the depth camera width resolution. + ~depth_height (int, default: 360) + Specify the depth camera height resolution. + ~depth_fps (int, default: 30) + Specify the depth camera FPS. + ~enable_color (bool, default: true) + Specify whether to enable the color camera. + ~color_width (int, default: 640) + Specify the color camera width resolution. + ~color_height (int, default: 480) + Specify the color camera height resolution. + ~color_fps (int, default: 30) + Specify the color camera FPS. + ~enable_fisheye (bool, default: true) + (ZR300 only) Specify whether to enable the fisheye camera. + ~fisheye_width (int, default: 640) + (ZR300 only) Specify the fisheye camera width resolution. + ~fisheye_height (int, default: 480) + (ZR300 only) Specify the fisheye camera height resolution. + ~fisheye_fps (int, default: 30) + (ZR300 only) Specify the fisheye camera FPS. 
+ + +## Usage +To start the camera: +```bash +$ roslaunch realsense_ros_camera camera.launch +``` + +To start the camera and camera viewers to see the video stream: + ```bash +# For R200/LR200 camera: +$ roslaunch realsense_ros_camera demo_r200_camera.launch +# For ZR300 camera: +$ roslaunch realsense_ros_camera demo_zr300_camera.launch +``` + +## Differences between this node and realsense_camera + +A full-featured realsense camera node is available as open source at http://github.com/intel-ros/realsense. This camera node is intended to be a smaller and more compact node illustrating use of the included middleware. +- This node is provided as sample source code only, and is not available pre-built as a ROS package. +- This node only supports the R200, LR200, and ZR300 cameras +- This node does not support use with a RGBD launch script +- This node does not support dynamic configuration of camera parameters +- This node does not export use of IR streams from camera + diff --git a/realsense_ros_camera/include/realsense_ros_camera/constants.h b/realsense_ros_camera/include/realsense_ros_camera/constants.h new file mode 100644 index 0000000..66331fc --- /dev/null +++ b/realsense_ros_camera/include/realsense_ros_camera/constants.h @@ -0,0 +1,44 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include +#include +#include +#include + +#pragma once +#ifndef REALSENSE_CAMERA_CONSTANTS_H +#define REALSENSE_CAMERA_CONSTANTS_H + +namespace realsense_ros_camera +{ +// Default Constants. 
+const std::string DEFAULT_SERIAL_NO = ""; + +const int DEPTH_WIDTH = 480; +const int DEPTH_HEIGHT = 360; +const int COLOR_WIDTH = 640; +const int COLOR_HEIGHT = 480; +const int FISHEYE_WIDTH = 640; +const int FISHEYE_HEIGHT = 480; +const int DEPTH_FPS = 30; +const int COLOR_FPS = 30; +const int FISHEYE_FPS = 30; +const bool ENABLE_DEPTH = true; +const bool ENABLE_COLOR = true; +const bool ENABLE_FISHEYE = true; + +const double ROTATION_IDENTITY[] = {1, 0, 0, 0, 1, 0, 0, 0, 1}; +const float MILLIMETER_METERS = 0.001; + +const std::string DEFAULT_BASE_FRAME_ID = "camera_link"; +const std::string DEFAULT_DEPTH_FRAME_ID = "camera_depth_frame"; +const std::string DEFAULT_COLOR_FRAME_ID = "camera_rgb_frame"; +const std::string DEFAULT_FISHEYE_FRAME_ID = "camera_fisheye_frame"; +const std::string DEFAULT_IMU_FRAME_ID = "camera_imu_frame"; +const std::string DEFAULT_DEPTH_OPTICAL_FRAME_ID = "camera_depth_optical_frame"; +const std::string DEFAULT_COLOR_OPTICAL_FRAME_ID = "camera_rgb_optical_frame"; +const std::string DEFAULT_FISHEYE_OPTICAL_FRAME_ID = "camera_fisheye_optical_frame"; +const std::string DEFAULT_IMU_OPTICAL_FRAME_ID = "camera_imu_optical_frame"; +} // namespace realsense_camera +#endif // REALSENSE_CAMERA_CONSTANTS_H diff --git a/realsense_ros_camera/launch/camera.launch b/realsense_ros_camera/launch/camera.launch new file mode 100644 index 0000000..4562e05 --- /dev/null +++ b/realsense_ros_camera/launch/camera.launch @@ -0,0 +1,48 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_camera/launch/demo_r200_camera.launch b/realsense_ros_camera/launch/demo_r200_camera.launch new file mode 100644 index 0000000..3e53bc0 --- /dev/null +++ b/realsense_ros_camera/launch/demo_r200_camera.launch @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/realsense_ros_camera/launch/demo_zr300_camera.launch b/realsense_ros_camera/launch/demo_zr300_camera.launch new file mode 100644 index 
0000000..a7db80f --- /dev/null +++ b/realsense_ros_camera/launch/demo_zr300_camera.launch @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_camera/launch/play_bag.launch b/realsense_ros_camera/launch/play_bag.launch new file mode 100644 index 0000000..0c2d73a --- /dev/null +++ b/realsense_ros_camera/launch/play_bag.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/realsense_ros_camera/msg/Extrinsics.msg b/realsense_ros_camera/msg/Extrinsics.msg new file mode 100644 index 0000000..6482a0e --- /dev/null +++ b/realsense_ros_camera/msg/Extrinsics.msg @@ -0,0 +1,3 @@ +std_msgs/Header header +float64[9] rotation +float64[3] translation diff --git a/realsense_ros_camera/msg/IMUInfo.msg b/realsense_ros_camera/msg/IMUInfo.msg new file mode 100644 index 0000000..68f47b7 --- /dev/null +++ b/realsense_ros_camera/msg/IMUInfo.msg @@ -0,0 +1,6 @@ +# header.frame_id is either set to "imu_accel" or "imu_gyro" +# to distinguish between "accel" and "gyro" info. +std_msgs/Header header +float64[12] data +float64[3] noise_variances +float64[3] bias_variances diff --git a/realsense_ros_camera/nodelet_plugins.xml b/realsense_ros_camera/nodelet_plugins.xml new file mode 100644 index 0000000..66d412a --- /dev/null +++ b/realsense_ros_camera/nodelet_plugins.xml @@ -0,0 +1,7 @@ + + + + RealSense Camera Access node + + + diff --git a/realsense_ros_camera/package.xml b/realsense_ros_camera/package.xml new file mode 100644 index 0000000..c1c2571 --- /dev/null +++ b/realsense_ros_camera/package.xml @@ -0,0 +1,30 @@ + + + realsense_ros_camera + 0.8.2 + The realsense_ros_camera package + Intel RealSense + Apache 2.0 + catkin + message_generation + roscpp + sensor_msgs + std_msgs + genmsg + nodelet + cv_bridge + image_transport + tf + image_transport + cv_bridge + nodelet + genmsg + roscpp + sensor_msgs + std_msgs + message_runtime + tf + + + + diff --git a/realsense_ros_camera/src/camera_node.cpp b/realsense_ros_camera/src/camera_node.cpp new file mode 100644 
index 0000000..c6869fb --- /dev/null +++ b/realsense_ros_camera/src/camera_node.cpp @@ -0,0 +1,646 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +// R200 and ZR300 types +std::map image_publishers_; +std::map image_format_; +std::map format_; +std::map info_publisher_; +std::map image_; +std::map encoding_; +std::string base_frame_id_; +std::map frame_id_; +std::map optical_frame_id_; +std::string imu_frame_id_, optical_imu_frame_id_; +std::map seq_; +std::map unit_step_size_; +std::map> stream_callback_per_stream; +std::map camera_info_; + +// ZR300 specific types +bool isZR300_ = false; +ros::Publisher accelInfo_publisher_, gyroInfo_publisher_, fe2imu_publisher_, fe2depth_publisher_; +ros::Publisher imu_publishers_[2] = {};//accel 0 gyro 1 +int seq_motion[2] = {0, 0}; +std::string optical_imu_id_[2] = {"imu_accel_frame_id", "imu_gyro_frame_id"}; + + +namespace realsense_ros_camera +{ +void getImuInfo(rs::device* device, IMUInfo &accelInfo, IMUInfo &gyroInfo) +{ + rs::motion_intrinsics imuIntrinsics = device->get_motion_intrinsics(); + + accelInfo.header.frame_id = "imu_accel"; + int index = 0; + for (int i = 0; i < 3; ++i) + { + for (int j = 0; j < 4; ++j) + { + accelInfo.data[index] = imuIntrinsics.acc.data[i][j]; + ++index; + } + accelInfo.noise_variances[i] = imuIntrinsics.acc.noise_variances[i]; + accelInfo.bias_variances[i] = imuIntrinsics.acc.bias_variances[i]; + } + + gyroInfo.header.frame_id = "imu_gyro"; + index = 0; + for (int i = 0; i < 3; ++i) + { + for (int j = 0; j < 4; ++j) + { + gyroInfo.data[index] = imuIntrinsics.gyro.data[i][j]; + ++index; + } + gyroInfo.noise_variances[i] = imuIntrinsics.gyro.noise_variances[i]; + 
gyroInfo.bias_variances[i] = imuIntrinsics.gyro.bias_variances[i]; + } +} + +Extrinsics rsExtrinsicsToMsg(rs::extrinsics rsExtrinsics) +{ + Extrinsics extrinsicsMsg; + + for (int i = 0; i < 9; ++i) + { + extrinsicsMsg.rotation[i] = rsExtrinsics.rotation[i]; + if (i < 3) extrinsicsMsg.translation[i] = rsExtrinsics.translation[i]; + } + + return extrinsicsMsg; +} + +Extrinsics getFisheye2ImuExtrinsicsMsg(rs::device* device) +{ + Extrinsics extrinsicsMsg = rsExtrinsicsToMsg(device->get_motion_extrinsics_from(rs::stream::fisheye)); + extrinsicsMsg.header.frame_id = "fisheye2imu_extrinsics"; + return extrinsicsMsg; +} + +Extrinsics getFisheye2DepthExtrinsicsMsg(rs::device* device) +{ + Extrinsics extrinsicsMsg = rsExtrinsicsToMsg(device->get_extrinsics(rs::stream::depth, rs::stream::fisheye)); + extrinsicsMsg.header.frame_id = "fisheye2depth_extrinsics"; + return extrinsicsMsg; +} + +class NodeletCamera: public nodelet::Nodelet +{ +public: + NodeletCamera() : + cameraStarted_(false) + { + // libRealsense format types for stream + format_[rs::stream::depth] = rs::format::z16; + format_[rs::stream::color] = rs::format::rgb8; + format_[rs::stream::fisheye] = rs::format::raw8; + + // CVBridge native image types + image_format_[rs::stream::depth] = CV_16UC1; + image_format_[rs::stream::color] = CV_8UC3; + image_format_[rs::stream::fisheye] = CV_8UC1; + + // ROS sensor_msgs::ImagePtr encoding types + encoding_[rs::stream::depth] = sensor_msgs::image_encodings::TYPE_16UC1; + encoding_[rs::stream::color] = sensor_msgs::image_encodings::RGB8; + encoding_[rs::stream::fisheye] = sensor_msgs::image_encodings::TYPE_8UC1; + + // ROS sensor_msgs::ImagePtr row step sizes + unit_step_size_[rs::stream::depth] = sizeof(uint16_t); + unit_step_size_[rs::stream::color] = sizeof(unsigned char) * 3; + unit_step_size_[rs::stream::fisheye] = sizeof(unsigned char); + + stream_name_[rs::stream::depth] = "depth"; + stream_name_[rs::stream::color] = "color"; + stream_name_[rs::stream::fisheye] = 
"fisheye"; + } + + virtual ~NodeletCamera() + { + if (cameraStarted_) + { + if (isZR300_) + device_->stop(rs::source::all_sources); + else + device_->stop(); + } + ctx_.reset(); + + const rs::stream All[] = { rs::stream::depth, rs::stream::color, rs::stream::fisheye }; + for (const auto stream : All) + { + if (false == enable_[stream]) + continue; + image_publishers_[stream].shutdown(); + } + image_publishers_.clear(); + } + +private: + virtual void onInit() + { + getParameters(); + + if (false == setupDevice()) + return; + + setupPublishers(); + setupStreams(); + publishStaticTransforms(); + }//end onInit + + + void getParameters() + { + pnh_ = getPrivateNodeHandle(); + + pnh_.param("serial_no", serial_no_, DEFAULT_SERIAL_NO); + + pnh_.param("depth_width", width_[rs::stream::depth], DEPTH_WIDTH); + pnh_.param("depth_height", height_[rs::stream::depth], DEPTH_HEIGHT); + pnh_.param("depth_fps", fps_[rs::stream::depth], DEPTH_FPS); + pnh_.param("enable_depth", enable_[rs::stream::depth], true); + + pnh_.param("color_width", width_[rs::stream::color], COLOR_WIDTH); + pnh_.param("color_height", height_[rs::stream::color], COLOR_HEIGHT); + pnh_.param("color_fps", fps_[rs::stream::color], COLOR_FPS); + pnh_.param("enable_color", enable_[rs::stream::color], false); + + pnh_.param("fisheye_width", width_[rs::stream::fisheye], FISHEYE_WIDTH); + pnh_.param("fisheye_height", height_[rs::stream::fisheye], FISHEYE_HEIGHT); + pnh_.param("fisheye_fps", fps_[rs::stream::fisheye], FISHEYE_FPS); + pnh_.param("enable_fisheye", enable_[rs::stream::fisheye], false); + + pnh_.param("base_frame_id", base_frame_id_, DEFAULT_BASE_FRAME_ID); + pnh_.param("depth_frame_id", frame_id_[rs::stream::depth], DEFAULT_DEPTH_FRAME_ID); + pnh_.param("color_frame_id", frame_id_[rs::stream::color], DEFAULT_COLOR_FRAME_ID); + pnh_.param("fisheye_frame_id", frame_id_[rs::stream::fisheye], DEFAULT_FISHEYE_FRAME_ID); + pnh_.param("imu_frame_id", imu_frame_id_, DEFAULT_IMU_FRAME_ID); + 
pnh_.param("depth_optical_frame_id", optical_frame_id_[rs::stream::depth], DEFAULT_DEPTH_OPTICAL_FRAME_ID); + pnh_.param("color_optical_frame_id", optical_frame_id_[rs::stream::color], DEFAULT_COLOR_OPTICAL_FRAME_ID); + pnh_.param("fisheye_optical_frame_id", optical_frame_id_[rs::stream::fisheye], DEFAULT_FISHEYE_OPTICAL_FRAME_ID); + pnh_.param("imu_optical_frame_id", optical_imu_frame_id_, DEFAULT_IMU_OPTICAL_FRAME_ID); + }//end getParameters + + + bool setupDevice() + { + ctx_.reset(new rs::context()); + int num_of_cams = ctx_ -> get_device_count(); + if (num_of_cams == 0) + { + ROS_ERROR("error : no RealSense R200, LR200, or ZR300 devices found."); + ctx_.reset(); + return false; + } + + rs::device *detected_dev; + for (int i = 0; i < num_of_cams; i++) + { + detected_dev = ctx_->get_device(i); + detected_dev->get_serial(); + if (serial_no_.empty() || (serial_no_ == std::string(detected_dev->get_serial()))) + { + device_ = detected_dev; + break; + } + } + + if (device_ == nullptr) + { + ROS_ERROR_STREAM("error: No RealSense device with serial_no = " << serial_no_ << " found."); + ctx_.reset(); + return false; + } + + auto device_name = device_ -> get_name(); + ROS_INFO_STREAM(device_name << ", serial_no: " << std::string(device_ ->get_serial())); + if (std::string(device_name).find("ZR300") == std::string::npos) + { + isZR300_ = false; + enable_[rs::stream::fisheye] = false; + + if ((std::string(device_name).find("R200") == std::string::npos) && + (std::string(device_name).find("LR200") == std::string::npos)) + { + ROS_ERROR_STREAM("error: This ROS node supports R200, LR200, and ZR300 only."); + ROS_ERROR_STREAM(" See https://github.com/intel-ros/realsense for F200 and SR300 support."); + ctx_.reset(); + return false; + } + } + else + { + // Only enable ZR300 functionality if fisheye stream is enabled. 
+ // Accel/Gyro automatically enabled when fisheye requested + if (true == enable_[rs::stream::fisheye]) + isZR300_ = true; + } + + return true; + }//end setupDevice + + + void setupPublishers() + { + image_transport::ImageTransport image_transport(getNodeHandle()); + + // Stream publishers and latched topics + if (true == enable_[rs::stream::color]) + { + image_publishers_[rs::stream::color] = image_transport.advertise("camera/color/image_raw", 1); + info_publisher_[rs::stream::color] = + node_handle.advertise< sensor_msgs::CameraInfo >("camera/color/camera_info", 1); + } + + if (true == enable_[rs::stream::depth]) + { + image_publishers_[rs::stream::depth] = image_transport.advertise("camera/depth/image_raw", 1); + info_publisher_[rs::stream::depth] = + node_handle.advertise< sensor_msgs::CameraInfo >("camera/depth/camera_info", 1); + } + + if (isZR300_) + { + // Stream publishers + image_publishers_[rs::stream::fisheye] = image_transport.advertise("camera/fisheye/image_raw", 1); + info_publisher_[rs::stream::fisheye] = + node_handle.advertise< sensor_msgs::CameraInfo >("camera/fisheye/camera_info", 1); + + imu_publishers_[RS_EVENT_IMU_GYRO] = node_handle.advertise< sensor_msgs::Imu >("camera/gyro/sample", 100); + imu_publishers_[RS_EVENT_IMU_ACCEL] = node_handle.advertise< sensor_msgs::Imu >("camera/accel/sample", 100); + + // Latched topics + fe2imu_publisher_ = node_handle.advertise< Extrinsics >("camera/extrinsics/fisheye2imu", 1, true); + fe2depth_publisher_ = node_handle.advertise< Extrinsics >("camera/extrinsics/fisheye2depth", 1, true); + accelInfo_publisher_ = node_handle.advertise< IMUInfo >("camera/accel/imu_info", 1, true); + gyroInfo_publisher_ = node_handle.advertise< IMUInfo >("camera/gyro/imu_info", 1, true); + } + }//end setupPublishers + + + void setupStreams() + { + device_->set_option(rs::option::r200_lr_auto_exposure_enabled, 1); + + const rs::stream All[] = { rs::stream::depth, rs::stream::color, rs::stream::fisheye }; + for (const auto 
stream : All) + { + if (false == enable_[stream]) + continue; + + // Define lambda callback for receiving stream data + stream_callback_per_stream[stream] = [stream](rs::frame frame) + { + if (isZR300_) + { + const auto timestampDomain = frame.get_frame_timestamp_domain(); + if (rs::timestamp_domain::microcontroller != timestampDomain) + { + ROS_ERROR_STREAM("error: Junk time stamp in stream:" << (int)(stream) << + "\twith frame counter:" << frame.get_frame_number()); + return ; + } + } + + image_[stream].data = (unsigned char *) frame.get_data(); + + sensor_msgs::ImagePtr img; + img = cv_bridge::CvImage(std_msgs::Header(), encoding_[stream], image_[stream]).toImageMsg(); + img->width = image_[stream].cols; + img->height = image_[stream].rows; + img->is_bigendian = false; + img->step = image_[stream].cols * unit_step_size_[stream]; + seq_[stream] += 1; + + img->header.frame_id = optical_frame_id_[stream]; + img->header.stamp = ros::Time(frame.get_timestamp()); + img->header.seq = seq_[stream]; + image_publishers_[stream].publish(img); + + camera_info_[stream].header.stamp = img->header.stamp; + camera_info_[stream].header.seq = img->header.seq; + info_publisher_[stream].publish(camera_info_[stream]); + }; + + // Enable the stream + device_->enable_stream(stream, width_[stream], height_[stream], format_[stream], fps_[stream]); + + // Publish info about the stream + getStreamCalibData(stream); + + // Setup stream callback for stream + image_[stream] = cv::Mat(camera_info_[stream].height, camera_info_[stream].width, + image_format_[stream], cv::Scalar(0, 0, 0)); + device_->set_frame_callback(stream, stream_callback_per_stream[stream]); + + ROS_INFO_STREAM(" enabled " << stream_name_[stream] << " stream, width: " + << camera_info_[stream].width << " height: " << camera_info_[stream].height << " fps: " << fps_[stream]); + }//end for + + + if (isZR300_) + { + // Needed to align image timestamps to common clock-domain with the motion events + 
device_->set_option(rs::option::fisheye_strobe, 1); + // This option causes the fisheye image to be aquired in-sync with the depth image. + device_->set_option(rs::option::fisheye_external_trigger, 1); + device_->set_option(rs::option::fisheye_color_auto_exposure, 1); + + //define callback to the motion events and set it. + std::function motion_callback; + motion_callback = [](rs::motion_data entry) + { + if ((entry.timestamp_data.source_id != RS_EVENT_IMU_GYRO) && + (entry.timestamp_data.source_id != RS_EVENT_IMU_ACCEL)) + return; + + rs_event_source motionType = entry.timestamp_data.source_id; + sensor_msgs::Imu imu_msg = sensor_msgs::Imu(); + imu_msg.header.stamp = ros::Time::now(); + imu_msg.header.frame_id = optical_imu_id_[motionType]; + imu_msg.orientation.x = 0.0; + imu_msg.orientation.y = 0.0; + imu_msg.orientation.z = 0.0; + imu_msg.orientation.w = 0.0; + imu_msg.orientation_covariance = { -1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}; + if (motionType == RS_EVENT_IMU_GYRO) + { + imu_msg.angular_velocity.x = entry.axes[0]; + imu_msg.angular_velocity.y = entry.axes[1]; + imu_msg.angular_velocity.z = entry.axes[2]; + } + if (motionType == RS_EVENT_IMU_ACCEL) + { + imu_msg.linear_acceleration.x = entry.axes[0]; + imu_msg.linear_acceleration.y = entry.axes[1]; + imu_msg.linear_acceleration.z = entry.axes[2]; + } + seq_motion[motionType] += 1; + imu_msg.header.seq = seq_motion[motionType]; + imu_msg.header.stamp = ros::Time(entry.timestamp_data.timestamp); + imu_publishers_[motionType].publish(imu_msg); + }; + + std::function timestamp_callback; + timestamp_callback = [](rs::timestamp_data entry) {}; + + device_->enable_motion_tracking(motion_callback, timestamp_callback); + ROS_INFO_STREAM(" enabled accel and gyro stream"); + + // publish ZR300-specific intrinsics/extrinsics + IMUInfo accelInfo, gyroInfo; + getImuInfo(device_, accelInfo, gyroInfo); + + fe2imu_publisher_.publish(getFisheye2ImuExtrinsicsMsg(device_)); + 
fe2depth_publisher_.publish(getFisheye2DepthExtrinsicsMsg(device_)); + accelInfo_publisher_.publish(accelInfo); + gyroInfo_publisher_.publish(gyroInfo); + } + + if (isZR300_) + device_->start(rs::source::all_sources); + else + device_->start(); + cameraStarted_ = true; + }//end setupStreams + + + void getStreamCalibData(rs::stream stream) + { + rs::intrinsics intrinsic = device_->get_stream_intrinsics(stream); + + camera_info_[stream].header.frame_id = optical_frame_id_[stream]; + camera_info_[stream].width = intrinsic.width; + camera_info_[stream].height = intrinsic.height; + + camera_info_[stream].K.at(0) = intrinsic.fx; + camera_info_[stream].K.at(2) = intrinsic.ppx; + camera_info_[stream].K.at(4) = intrinsic.fy; + camera_info_[stream].K.at(5) = intrinsic.ppy; + camera_info_[stream].K.at(8) = 1; + + camera_info_[stream].P.at(0) = camera_info_[stream].K.at(0); + camera_info_[stream].P.at(1) = 0; + camera_info_[stream].P.at(2) = camera_info_[stream].K.at(2); + camera_info_[stream].P.at(3) = 0; + camera_info_[stream].P.at(4) = 0; + camera_info_[stream].P.at(5) = camera_info_[stream].K.at(4); + camera_info_[stream].P.at(6) = camera_info_[stream].K.at(5); + camera_info_[stream].P.at(7) = 0; + camera_info_[stream].P.at(8) = 0; + camera_info_[stream].P.at(9) = 0; + camera_info_[stream].P.at(10) = 1; + camera_info_[stream].P.at(11) = 0; + + if (stream == rs::stream::depth) + { + // set depth to color translation values in Projection matrix (P) + rs::extrinsics extrinsic = device_->get_extrinsics(rs::stream::depth, rs::stream::color); + camera_info_[stream].P.at(3) = extrinsic.translation[0]; // Tx + camera_info_[stream].P.at(7) = extrinsic.translation[1]; // Ty + camera_info_[stream].P.at(11) = extrinsic.translation[2]; // Tz + + for (int i = 0; i < 9; i++) + camera_info_[stream].R.at(i) = extrinsic.rotation[i]; + } + + switch ((int32_t)intrinsic.model()) + { + case 0: + camera_info_[stream].distortion_model = "none"; + break; + case 1: + // This is the same as 
"modified_brown_conrady", but used by ROS + camera_info_[stream].distortion_model = "plumb_bob"; + break; + case 2: + camera_info_[stream].distortion_model = "inverse_brown_conrady"; + break; + case 3: + camera_info_[stream].distortion_model = "distortion_ftheta"; + break; + default: + camera_info_[stream].distortion_model = "others"; + break; + } + + // set R (rotation matrix) values to identity matrix + if (stream != rs::stream::depth) + { + camera_info_[stream].R.at(0) = 1.0; + camera_info_[stream].R.at(1) = 0.0; + camera_info_[stream].R.at(2) = 0.0; + camera_info_[stream].R.at(3) = 0.0; + camera_info_[stream].R.at(4) = 1.0; + camera_info_[stream].R.at(5) = 0.0; + camera_info_[stream].R.at(6) = 0.0; + camera_info_[stream].R.at(7) = 0.0; + camera_info_[stream].R.at(8) = 1.0; + } + + for (int i = 0; i < 5; i++) + { + camera_info_[stream].D.push_back(intrinsic.coeffs[i]); + } + }//end getStreamCalibData + + + void publishStaticTransforms() + { + // Publish transforms for the cameras + tf::Quaternion q_c2co; + tf::Quaternion q_d2do; + tf::Quaternion q_i2io; + geometry_msgs::TransformStamped b2d_msg; + geometry_msgs::TransformStamped d2do_msg; + geometry_msgs::TransformStamped b2c_msg; + geometry_msgs::TransformStamped c2co_msg; + geometry_msgs::TransformStamped b2i_msg; + geometry_msgs::TransformStamped i2io_msg; + + // Get the current timestamp for all static transforms + ros::Time transform_ts_ = ros::Time::now(); + + // The color frame is used as the base frame. + // Hence no additional transformation is done from base frame to color frame. 
+ b2c_msg.header.stamp = transform_ts_; + b2c_msg.header.frame_id = base_frame_id_; + b2c_msg.child_frame_id = frame_id_[rs::stream::color]; + b2c_msg.transform.translation.x = 0; + b2c_msg.transform.translation.y = 0; + b2c_msg.transform.translation.z = 0; + b2c_msg.transform.rotation.x = 0; + b2c_msg.transform.rotation.y = 0; + b2c_msg.transform.rotation.z = 0; + b2c_msg.transform.rotation.w = 1; + static_tf_broadcaster_.sendTransform(b2c_msg); + + // Transform color frame to color optical frame + q_c2co.setRPY(-M_PI / 2, 0.0, -M_PI / 2); + c2co_msg.header.stamp = transform_ts_; + c2co_msg.header.frame_id = frame_id_[rs::stream::color]; + c2co_msg.child_frame_id = optical_frame_id_[rs::stream::color]; + c2co_msg.transform.translation.x = 0; + c2co_msg.transform.translation.y = 0; + c2co_msg.transform.translation.z = 0; + c2co_msg.transform.rotation.x = q_c2co.getX(); + c2co_msg.transform.rotation.y = q_c2co.getY(); + c2co_msg.transform.rotation.z = q_c2co.getZ(); + c2co_msg.transform.rotation.w = q_c2co.getW(); + static_tf_broadcaster_.sendTransform(c2co_msg); + + // Transform base frame to depth frame + rs::extrinsics color2depth_extrinsic = device_->get_extrinsics(rs::stream::color, rs::stream::depth); + b2d_msg.header.stamp = transform_ts_; + b2d_msg.header.frame_id = base_frame_id_; + b2d_msg.child_frame_id = frame_id_[rs::stream::depth]; + b2d_msg.transform.translation.x = color2depth_extrinsic.translation[2]; + b2d_msg.transform.translation.y = -color2depth_extrinsic.translation[0]; + b2d_msg.transform.translation.z = -color2depth_extrinsic.translation[1]; + b2d_msg.transform.rotation.x = 0; + b2d_msg.transform.rotation.y = 0; + b2d_msg.transform.rotation.z = 0; + b2d_msg.transform.rotation.w = 1; + static_tf_broadcaster_.sendTransform(b2d_msg); + + // Transform depth frame to depth optical frame + q_d2do.setRPY(-M_PI / 2, 0.0, -M_PI / 2); + d2do_msg.header.stamp = transform_ts_; + d2do_msg.header.frame_id = frame_id_[rs::stream::depth]; + 
d2do_msg.child_frame_id = optical_frame_id_[rs::stream::depth]; + d2do_msg.transform.translation.x = 0; + d2do_msg.transform.translation.y = 0; + d2do_msg.transform.translation.z = 0; + d2do_msg.transform.rotation.x = q_d2do.getX(); + d2do_msg.transform.rotation.y = q_d2do.getY(); + d2do_msg.transform.rotation.z = q_d2do.getZ(); + d2do_msg.transform.rotation.w = q_d2do.getW(); + static_tf_broadcaster_.sendTransform(d2do_msg); + + if (isZR300_) + { + tf::Quaternion q_f2fo, q_imu2imuo; + geometry_msgs::TransformStamped b2f_msg; + geometry_msgs::TransformStamped f2fo_msg; + geometry_msgs::TransformStamped b2imu_msg; + geometry_msgs::TransformStamped imu2imuo_msg; + + // Transform base frame to fisheye frame + b2f_msg.header.stamp = transform_ts_; + b2f_msg.header.frame_id = base_frame_id_; + b2f_msg.child_frame_id = frame_id_[rs::stream::fisheye]; + rs::extrinsics color2fisheye_extrinsic = device_->get_extrinsics(rs::stream::color, rs::stream::fisheye); + b2f_msg.transform.translation.x = color2fisheye_extrinsic.translation[2]; + b2f_msg.transform.translation.y = -color2fisheye_extrinsic.translation[0]; + b2f_msg.transform.translation.z = -color2fisheye_extrinsic.translation[1]; + b2f_msg.transform.rotation.x = 0; + b2f_msg.transform.rotation.y = 0; + b2f_msg.transform.rotation.z = 0; + b2f_msg.transform.rotation.w = 1; + static_tf_broadcaster_.sendTransform(b2f_msg); + + // Transform fisheye frame to fisheye optical frame + q_f2fo.setRPY(-M_PI / 2, 0.0, -M_PI / 2); + f2fo_msg.header.stamp = transform_ts_; + f2fo_msg.header.frame_id = frame_id_[rs::stream::fisheye]; + f2fo_msg.child_frame_id = optical_frame_id_[rs::stream::fisheye]; + f2fo_msg.transform.translation.x = 0; + f2fo_msg.transform.translation.y = 0; + f2fo_msg.transform.translation.z = 0; + f2fo_msg.transform.rotation.x = q_f2fo.getX(); + f2fo_msg.transform.rotation.y = q_f2fo.getY(); + f2fo_msg.transform.rotation.z = q_f2fo.getZ(); + f2fo_msg.transform.rotation.w = q_f2fo.getW(); + 
static_tf_broadcaster_.sendTransform(f2fo_msg); + } + } + +private: + ros::NodeHandle node_handle, pnh_; + bool cameraStarted_; + std::unique_ptr< rs::context > ctx_; + rs::device *device_; + + std::string serial_no_; + std::string usb_port_id_; + std::string camera_type_; + + std::map width_; + std::map height_; + std::map fps_; + std::map enable_; + std::map stream_name_; + tf2_ros::StaticTransformBroadcaster static_tf_broadcaster_; +};//end class + +PLUGINLIB_DECLARE_CLASS(realsense_ros_camera, NodeletCamera, realsense_ros_camera::NodeletCamera, nodelet::Nodelet); + +}//end namespace + diff --git a/realsense_ros_camera/test/camera.test b/realsense_ros_camera/test/camera.test new file mode 100644 index 0000000..5e1cf58 --- /dev/null +++ b/realsense_ros_camera/test/camera.test @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_camera/test/camera_core.cpp b/realsense_ros_camera/test/camera_core.cpp new file mode 100644 index 0000000..e6e0fa6 --- /dev/null +++ b/realsense_ros_camera/test/camera_core.cpp @@ -0,0 +1,608 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include +#include "camera_core.h" // NOLINT(build/include) +#include // Added to satisfy roslint +#include // Added to satisfy roslint + +using std::string; +using std::stringstream; +using std::vector; + +void getMsgInfo(rs_stream stream, const sensor_msgs::ImageConstPtr &msg) +{ + g_encoding_recv[stream] = msg->encoding; + g_width_recv[stream] = msg->width; + g_height_recv[stream] = msg->height; + g_step_recv[stream] = msg->step; +} + +void getCameraInfo(rs_stream stream, const sensor_msgs::CameraInfoConstPtr &info_msg) +{ + g_caminfo_height_recv[stream] = info_msg->height; + g_caminfo_width_recv[stream] = info_msg->width; + + g_dmodel_recv[stream] = info_msg->distortion_model; + + // copy rotation matrix + for (unsigned int i = 0; i < sizeof(info_msg->R) / sizeof(double); i++) + { + g_caminfo_rotation_recv[stream][i] = info_msg->R[i]; + } + + // copy projection matrix + for (unsigned int i = 0; i < sizeof(info_msg->P) / sizeof(double); i++) + { + g_caminfo_projection_recv[stream][i] = info_msg->P[i]; + } +} + +void imageDepthCallback(const sensor_msgs::ImageConstPtr &msg, const sensor_msgs::CameraInfoConstPtr &info_msg) +{ + cv::Mat image = cv_bridge::toCvShare(msg, sensor_msgs::image_encodings::TYPE_16UC1)->image; + uint16_t *image_data = reinterpret_cast(image.data); + + double depth_total = 0; + int depth_count = 0; + for (unsigned int i = 0; i < msg->height * msg->width; ++i) + { + if ((0 < *image_data) && (*image_data <= g_max_z)) + { + depth_total += *image_data; + depth_count++; + } + image_data++; + } + if (depth_count != 0) + { + g_depth_avg = static_cast(depth_total / depth_count); + } + + getMsgInfo(RS_STREAM_DEPTH, msg); + getCameraInfo(RS_STREAM_DEPTH, info_msg); + + for (unsigned int i = 0; i < 5; i++) + { + g_depth_caminfo_D_recv[i] = info_msg->D[i]; + } + + g_depth_recv = true; +} + +void imageColorCallback(const sensor_msgs::ImageConstPtr &msg, const sensor_msgs::CameraInfoConstPtr &info_msg) +{ + cv::Mat image = 
cv_bridge::toCvShare(msg, "rgb8")->image; + uchar *color_data = image.data; + + int64 color_total = 0; + int color_count = 1; + for (unsigned int i = 0; i < msg->height * msg->width * 3; i++) + { + if (*color_data > 0 && *color_data < 255) + { + color_total += *color_data; + color_count++; + } + color_data++; + } + if (color_count != 0) + { + g_color_avg = static_cast(color_total / color_count); + } + + getMsgInfo(RS_STREAM_COLOR, msg); + getCameraInfo(RS_STREAM_COLOR, info_msg); + + for (unsigned int i = 0; i < 5; i++) + { + g_color_caminfo_D_recv[i] = info_msg->D[i]; + } + + g_color_recv = true; +} + +void imageFisheyeCallback(const sensor_msgs::ImageConstPtr &msg, const sensor_msgs::CameraInfoConstPtr &info_msg) +{ + cv::Mat image = cv_bridge::toCvShare(msg, sensor_msgs::image_encodings::TYPE_8UC1)->image; + uchar *fisheye_data = image.data; + + double fisheye_total = 0.0; + int fisheye_count = 1; + for (unsigned int i = 0; i < msg->height * msg->width; i++) + { + if (*fisheye_data > 0 && *fisheye_data < 255) + { + fisheye_total += *fisheye_data; + fisheye_count++; + } + fisheye_data++; + } + if (fisheye_count != 0) + { + g_fisheye_avg = static_cast(fisheye_total / fisheye_count); + } + + getMsgInfo(RS_STREAM_FISHEYE, msg); + getCameraInfo(RS_STREAM_FISHEYE, info_msg); + + for (unsigned int i = 0; i < 5; i++) + { + g_fisheye_caminfo_D_recv[i] = info_msg->D[i]; + } + + g_fisheye_recv = true; +} + +void accelCallback(const sensor_msgs::ImuConstPtr &imu) +{ + g_accel_recv = false; + if (imu->linear_acceleration_covariance[0] != -1.0) + { + if ((imu->linear_acceleration.x != 0.000) || + (imu->linear_acceleration.y != 0.000) || + (imu->linear_acceleration.z != 0.000)) + { + g_accel_recv = true; + } + } +} + +void gyroCallback(const sensor_msgs::ImuConstPtr &imu) +{ + g_gyro_recv = false; + if (imu->angular_velocity_covariance[0] != -1.0) + { + if ((imu->angular_velocity.x != 0.0) || + (imu->angular_velocity.y != 0.0) || + (imu->angular_velocity.z != 0.0)) + { + 
g_gyro_recv = true; + } + } +} + +TEST(RealsenseTests, testIsColorStreamEnabled) +{ + if (g_enable_color) + { + EXPECT_TRUE(g_color_recv); + } + else + { + EXPECT_FALSE(g_color_recv); + } +} + +TEST(RealsenseTests, testColorStream) +{ + if (g_enable_color) + { + EXPECT_GT(g_color_avg, 0); + if (!g_color_encoding_exp.empty()) + { + EXPECT_EQ(g_color_encoding_exp, g_encoding_recv[RS_STREAM_COLOR]); + } + if (g_color_step_exp > 0) + { + EXPECT_EQ(g_color_step_exp, g_step_recv[RS_STREAM_COLOR]); + } + } +} + +TEST(RealsenseTests, testColorResolution) +{ + if (g_enable_color) + { + EXPECT_TRUE(g_color_recv); + if (g_color_height_exp > 0) + { + EXPECT_EQ(g_color_height_exp, g_height_recv[RS_STREAM_COLOR]); + } + if (g_color_width_exp > 0) + { + EXPECT_EQ(g_color_width_exp, g_width_recv[RS_STREAM_COLOR]); + } + } +} + +TEST(RealsenseTests, testColorCameraInfo) +{ + if (g_enable_color) + { + EXPECT_EQ(g_width_recv[RS_STREAM_COLOR], g_caminfo_width_recv[RS_STREAM_COLOR]); + EXPECT_EQ(g_height_recv[RS_STREAM_COLOR], g_caminfo_height_recv[RS_STREAM_COLOR]); + EXPECT_STREQ(g_dmodel_recv[RS_STREAM_COLOR].c_str(), "plumb_bob"); + + // verify rotation is equal to identity matrix + for (unsigned int i = 0; i < sizeof(realsense_ros_camera::ROTATION_IDENTITY) / sizeof(double); i++) + { + EXPECT_EQ(realsense_ros_camera::ROTATION_IDENTITY[i], g_caminfo_rotation_recv[RS_STREAM_COLOR][i]); + } + + // check projection matrix values are set + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_COLOR][0] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][1], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_COLOR][2] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][3], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][4], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_COLOR][5] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_COLOR][6] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][7], 0.0); + 
EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][8], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][9], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_COLOR][10] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_COLOR][11], 0.0); + + // R200 and ZR300 cameras have Color distortion parameters + if ((g_camera_type == "R200") || (g_camera_type == "ZR300")) + { + bool any_are_zero = false; + // Ignoring the 5th value since it always appears to be 0.0 + for (unsigned int i = 0; i < 4; i++) + { + if (g_color_caminfo_D_recv[i] == 0.0) + { + any_are_zero = true; + } + } + EXPECT_FALSE(any_are_zero); + } + } +} + +TEST(RealsenseTests, testIsDepthStreamEnabled) +{ + if (g_enable_depth) + { + EXPECT_TRUE(g_depth_recv); + } + else + { + EXPECT_FALSE(g_depth_recv); + } +} + +TEST(RealsenseTests, testDepthStream) +{ + if (g_enable_depth) + { + EXPECT_GT(g_depth_avg, 0); + if (!g_depth_encoding_exp.empty()) + { + EXPECT_EQ(g_depth_encoding_exp, g_encoding_recv[RS_STREAM_DEPTH]); + } + if (g_depth_step_exp > 0) + { + EXPECT_EQ(g_depth_step_exp, g_step_recv[RS_STREAM_DEPTH]); + } + } +} + +TEST(RealsenseTests, testDepthResolution) +{ + if (g_enable_depth) + { + EXPECT_TRUE(g_depth_recv); + if (g_depth_height_exp > 0) + { + EXPECT_EQ(g_depth_height_exp, g_height_recv[RS_STREAM_DEPTH]); + } + if (g_depth_width_exp > 0) + { + EXPECT_EQ(g_depth_width_exp, g_width_recv[RS_STREAM_DEPTH]); + } + } +} + +TEST(RealsenseTests, testDepthCameraInfo) +{ + if (g_enable_depth) + { + EXPECT_EQ(g_width_recv[RS_STREAM_DEPTH], g_caminfo_width_recv[RS_STREAM_DEPTH]); + EXPECT_EQ(g_height_recv[RS_STREAM_DEPTH], g_caminfo_height_recv[RS_STREAM_DEPTH]); + EXPECT_STREQ(g_dmodel_recv[RS_STREAM_DEPTH].c_str(), "none"); + + // verify rotation is equal to identity matrix + // TODO: Investigate why this is commented out on camera_node.cpp, func 'getStreamCalibData' + // for (unsigned int i = 0; i < sizeof(realsense_ros_camera::ROTATION_IDENTITY)/sizeof(double); i++) + // { + 
// EXPECT_EQ(realsense_ros_camera::ROTATION_IDENTITY[i], g_caminfo_rotation_recv[RS_STREAM_DEPTH][i]); + // } + + // check projection matrix values are set + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][0] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_DEPTH][1], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][2] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][3] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_DEPTH][4], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][5] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][6] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][7] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_DEPTH][8], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_DEPTH][9], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][10] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_DEPTH][11] != 0.0); + } +} + + +TEST(RealsenseTests, testIsFisheyeStreamEnabled) +{ + if (g_camera_type != "ZR300") + return; + + if (g_enable_fisheye) + { + EXPECT_TRUE(g_fisheye_recv); + } + else + { + EXPECT_FALSE(g_fisheye_recv); + } +} + + +TEST(RealsenseTests, testFisheyeStream) +{ + if (g_camera_type != "ZR300") + return; + + if (g_enable_fisheye) + { + EXPECT_GT(g_fisheye_avg, 0); + } +} + +TEST(RealsenseTests, testFisheyeResolution) +{ + if (g_camera_type != "ZR300") + return; + + if (g_enable_fisheye) + { + EXPECT_TRUE(g_fisheye_recv); + if (g_fisheye_height_exp > 0) + { + EXPECT_EQ(g_fisheye_height_exp, g_height_recv[RS_STREAM_FISHEYE]); + } + if (g_fisheye_width_exp > 0) + { + EXPECT_EQ(g_fisheye_width_exp, g_width_recv[RS_STREAM_FISHEYE]); + } + } +} + +TEST(RealsenseTests, testFisheyeCameraInfo) +{ + if (g_camera_type != "ZR300") + return; + + if (g_enable_fisheye) + { + EXPECT_EQ(g_width_recv[RS_STREAM_FISHEYE], g_caminfo_width_recv[RS_STREAM_FISHEYE]); + EXPECT_EQ(g_height_recv[RS_STREAM_FISHEYE], 
g_caminfo_height_recv[RS_STREAM_FISHEYE]); + EXPECT_STREQ(g_dmodel_recv[RS_STREAM_FISHEYE].c_str(), "distortion_ftheta"); + + // verify rotation is equal to identity matrix + for (unsigned int i = 0; i < sizeof(realsense_ros_camera::ROTATION_IDENTITY) / sizeof(double); i++) + { + EXPECT_EQ(realsense_ros_camera::ROTATION_IDENTITY[i], g_caminfo_rotation_recv[RS_STREAM_FISHEYE][i]); + } + + // check projection matrix values are set + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_FISHEYE][0] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][1], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_FISHEYE][2] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][3], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][4], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_FISHEYE][5] != 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_FISHEYE][6] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][7], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][8], 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][9], 0.0); + EXPECT_TRUE(g_caminfo_projection_recv[RS_STREAM_FISHEYE][10] != 0.0); + EXPECT_EQ(g_caminfo_projection_recv[RS_STREAM_FISHEYE][11], 0.0); + + // ZR300 cameras have Fisheye distortion parameters + // Only the first coefficient is used/valid + if (g_camera_type == "ZR300") + { + bool any_are_zero = false; + for (unsigned int i = 0; i < 1; i++) + { + if (g_fisheye_caminfo_D_recv[i] == 0.0) + { + any_are_zero = true; + } + } + EXPECT_FALSE(any_are_zero); + } + } +} + +TEST(RealsenseTests, testImu) +{ + if (g_camera_type != "ZR300") + return; + + if (g_enable_imu) + { + EXPECT_TRUE(g_accel_recv); + EXPECT_TRUE(g_gyro_recv); + } + else + { + EXPECT_FALSE(g_accel_recv); + EXPECT_FALSE(g_gyro_recv); + } +} + +void fillConfigMap(int argc, char **argv) +{ + std::vector args; + + for (int i = 1; i < argc; ++i) + { + args.push_back(argv[i]); + } + + while (args.size() > 1) + { 
+ g_config_args[args[0]] = args[1]; + + args.erase(args.begin()); + args.erase(args.begin()); + } + + if (argc > 1) + { + if (g_config_args.find("camera_type") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "camera_type", g_config_args.at("camera_type").c_str()); + g_camera_type = g_config_args.at("camera_type").c_str(); + } + + // Set depth arguments. + if (g_config_args.find("enable_depth") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "enable_depth", g_config_args.at("enable_depth").c_str()); + if (strcmp((g_config_args.at("enable_depth").c_str()), "true") == 0) + { + g_enable_depth = true; + } + else + { + g_enable_depth = false; + } + } + + if (g_config_args.find("depth_encoding") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "depth_encoding", g_config_args.at("depth_encoding").c_str()); + g_depth_encoding_exp = g_config_args.at("depth_encoding").c_str(); + } + if (g_config_args.find("depth_height") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "depth_height", g_config_args.at("depth_height").c_str()); + g_depth_height_exp = atoi(g_config_args.at("depth_height").c_str()); + } + if (g_config_args.find("depth_width") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "depth_width", g_config_args.at("depth_width").c_str()); + g_depth_width_exp = atoi(g_config_args.at("depth_width").c_str()); + } + if (g_config_args.find("depth_step") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "depth_step", g_config_args.at("depth_step").c_str()); + g_depth_step_exp = atoi(g_config_args.at("depth_step").c_str()); + } + + // Set color arguments. 
+ if (g_config_args.find("enable_color") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "enable_color", g_config_args.at("enable_color").c_str()); + if (strcmp((g_config_args.at("enable_color").c_str()), "true") == 0) + { + g_enable_color = true; + } + else + { + g_enable_color = false; + } + } + if (g_config_args.find("color_encoding") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "color_encoding", g_config_args.at("color_encoding").c_str()); + g_color_encoding_exp = g_config_args.at("color_encoding").c_str(); + } + if (g_config_args.find("color_height") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "color_height", g_config_args.at("color_height").c_str()); + g_color_height_exp = atoi(g_config_args.at("color_height").c_str()); + } + if (g_config_args.find("color_width") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "color_width", g_config_args.at("color_width").c_str()); + g_color_width_exp = atoi(g_config_args.at("color_width").c_str()); + } + if (g_config_args.find("color_step") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "color_step", g_config_args.at("color_step").c_str()); + g_color_step_exp = atoi(g_config_args.at("color_step").c_str()); + } + + // Set fisheye arguments. 
+ if (g_config_args.find("enable_fisheye") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "enable_fisheye", + g_config_args.at("enable_fisheye").c_str()); + if (strcmp((g_config_args.at("enable_fisheye").c_str()), "true") == 0) + { + g_enable_fisheye = true; + g_enable_imu = true; + } + else + { + g_enable_fisheye = false; + g_enable_imu = false; + } + } + + if (g_config_args.find("fisheye_height") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "fisheye_height", g_config_args.at("fisheye_height").c_str()); + g_depth_height_exp = atoi(g_config_args.at("fisheye_height").c_str()); + } + if (g_config_args.find("fisheye_width") != g_config_args.end()) + { + ROS_INFO("RealSense camera test - Setting %s to %s", "fisheye_width", g_config_args.at("fisheye_width").c_str()); + g_depth_width_exp = atoi(g_config_args.at("fisheye_width").c_str()); + } + } +} + +int main(int argc, char **argv) //try +{ + testing::InitGoogleTest(&argc, argv); + + ros::init(argc, argv, "utest"); + ros::NodeHandle n; + ros::NodeHandle nh(n, "camera"); + + image_transport::CameraSubscriber camera_subscriber[STREAM_COUNT]; + ros::Subscriber sub_accel; + ros::Subscriber sub_gyro; + + fillConfigMap(argc, argv); + + ROS_INFO_STREAM("RealSense camera test - Initializing Tests..."); + + ros::NodeHandle depth_nh(nh, "depth"); + image_transport::ImageTransport depth_image_transport(depth_nh); + camera_subscriber[0] = depth_image_transport.subscribeCamera("image_raw", 1, imageDepthCallback, 0); + + ros::NodeHandle color_nh(nh, "color"); + image_transport::ImageTransport color_image_transport(color_nh); + camera_subscriber[1] = color_image_transport.subscribeCamera("image_raw", 1, imageColorCallback, 0); + + // ZR300 cameras have Fisheye and IMU + ros::NodeHandle fisheye_nh(nh, "fisheye"); + image_transport::ImageTransport fisheye_image_transport(fisheye_nh); + if (g_camera_type == "ZR300") + { + camera_subscriber[4] = 
fisheye_image_transport.subscribeCamera("image_raw", 1, imageFisheyeCallback, 0); + sub_accel = n.subscribe("camera/accel/sample", 1, accelCallback); + sub_gyro = n.subscribe("camera/gyro/sample", 1, gyroCallback); + } + + ros::Duration duration; + duration.sec = 5; + duration.sleep(); + ros::spinOnce(); + + ROS_INFO_STREAM("RealSense camera test - Running Tests..."); + + return RUN_ALL_TESTS(); +} +//catch(...) {} // catch the "testing::internal::::ClassUniqueToAlwaysTrue" from gtest diff --git a/realsense_ros_camera/test/camera_core.h b/realsense_ros_camera/test/camera_core.h new file mode 100644 index 0000000..7db593f --- /dev/null +++ b/realsense_ros_camera/test/camera_core.h @@ -0,0 +1,85 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once +#ifndef CAMERA_CORE_H // NOLINT(build/header_guard) +#define CAMERA_CORE_H + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +const int STREAM_COUNT = 5; + +// utest commandline args +int g_color_height_exp = 0; +int g_color_width_exp = 0; +int g_depth_height_exp = 0; +int g_depth_width_exp = 0; +int g_fisheye_height_exp = 0; +int g_fisheye_width_exp = 0; +uint32_t g_depth_step_exp; // Expected depth step. +uint32_t g_color_step_exp; // Expected color step. + +bool g_enable_color = true; +bool g_enable_depth = true; +bool g_enable_fisheye = false; +bool g_enable_imu = false; + +std::string g_depth_encoding_exp; // Expected depth encoding. +std::string g_color_encoding_exp; // Expected color encoding. + +std::map g_config_args; +const float R200_MAX_Z = 10.0f; +double g_max_z = R200_MAX_Z * 1000.0f; // Converting meter to mm. 
+ +bool g_depth_recv = false; +bool g_color_recv = false; +bool g_fisheye_recv = false; +bool g_accel_recv = false; +bool g_gyro_recv = false; + +float g_depth_avg = 0.0f; +float g_color_avg = 0.0f; +float g_fisheye_avg = 0.0f; +float g_pc_depth_avg = 0.0f; + +int g_height_recv[STREAM_COUNT] = {0}; +int g_width_recv[STREAM_COUNT] = {0}; +uint32_t g_step_recv[STREAM_COUNT] = {0}; // Received stream step. + +std::string g_encoding_recv[STREAM_COUNT]; // Expected stream encoding. + +int g_caminfo_height_recv[STREAM_COUNT] = {0}; +int g_caminfo_width_recv[STREAM_COUNT] = {0}; +double g_color_caminfo_D_recv[5] = {0.0}; +double g_depth_caminfo_D_recv[5] = {0.0}; +double g_fisheye_caminfo_D_recv[5] = {0.0}; + +double g_caminfo_rotation_recv[STREAM_COUNT][9] = {{0.0}}; +double g_caminfo_projection_recv[STREAM_COUNT][12] = {{0.0}}; + +std::string g_dmodel_recv[STREAM_COUNT]; +std::string g_camera_type; + + +#endif // CAMERA_CORE_H // NOLINT(build/header_guard) diff --git a/realsense_ros_object/CHANGELOG.rst b/realsense_ros_object/CHANGELOG.rst new file mode 100644 index 0000000..543f434 --- /dev/null +++ b/realsense_ros_object/CHANGELOG.rst @@ -0,0 +1,13 @@ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Changelog for package realsense_ros_object +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +0.8.2 (2017-04-04) +------------------ +* Bug fixes and updates +* Contributors: Curfman, Matthew C, Matt Curfman, Shoshana Skurnik, Skurnik, Shoshana + +0.8.1 (2017-03-02) +------------------ +* Initial release +* Contributors: Curfman, Matthew C, Matt Curfman, Shoshana Skurnik, Skurnik, Shoshana diff --git a/realsense_ros_object/CMakeLists.txt b/realsense_ros_object/CMakeLists.txt new file mode 100755 index 0000000..999623b --- /dev/null +++ b/realsense_ros_object/CMakeLists.txt @@ -0,0 +1,128 @@ +cmake_minimum_required(VERSION 2.8.3) +project(realsense_ros_object) + +set(CMAKE_CXX_FLAGS "-std=c++0x ${CMAKE_CXX_FLAGS}") + +#set(UP "${CMAKE_CURRENT_SOURCE_DIR}/../../../") +#set(DEPS 
"${UP}dependencies") + + +find_package(OpenCV REQUIRED) +find_package(catkin REQUIRED COMPONENTS + cv_bridge + image_transport + message_generation +# realsense_ros_camera +) + +add_message_files( FILES + Object.msg + ObjectArray.msg + ObjectInBox.msg + ObjectsInBoxes.msg + TrackedObject.msg + TrackedObjectsArray.msg + UI.msg + cpu_gpu.msg + Rect.msg + Location3D.msg +) + +generate_messages( + DEPENDENCIES + std_msgs + geometry_msgs + sensor_msgs +) + +catkin_package( + CATKIN_DEPENDS message_runtime std_msgs sensor_msgs geometry_msgs + LIBRARIES ${PROJECT_NAME} +) + +SET( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DRUN_AS_NODELET" ) + +# Specify additional locations of header files +include_directories( + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} +) + +add_library(${PROJECT_NAME} src/realsense_localization_nodelet.cpp src/realsense_recognition_nodelet.cpp src/realsense_tracking_nodelet.cpp src/utils.cpp) + +target_link_libraries(${PROJECT_NAME} + realsense + realsense_image + realsense_projection + realsense_object_recognition + ${catkin_LIBRARIES} + ${OpenCV_LIBRARIES} +) +add_dependencies(${PROJECT_NAME} ${PROJECT_NAME}_generate_messages_cpp) +add_dependencies(${PROJECT_NAME} ${catkin_EXPORTED_TARGETS}) + + +# Viewer +add_library(${PROJECT_NAME}_viewer src/viewer/realsense_orview_nodelet.cpp src/viewer/GUI_utils.cpp) + +target_link_libraries(${PROJECT_NAME}_viewer + ${catkin_LIBRARIES} + ${OpenCV_LIBRARIES} +) + +if (REALSENSE_ENABLE_TESTING) + find_package(rostest REQUIRED) + + catkin_download_test_data( + detection.bag + https://s3-eu-west-1.amazonaws.com/realsense-rostest-public/realsense_ros_object/detection.bag + DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/tests + MD5 50afdbdd5d3f761612e1643f2a3281f1) + + add_executable(tests_object test/object_test.cpp) + target_link_libraries(tests_object + ${catkin_LIBRARIES} + ${GTEST_LIBRARIES} + -lpthread -lm + ) + + add_dependencies(tests_object ${PROJECT_NAME}_generate_messages_cpp) + 
add_dependencies(tests_object ${catkin_EXPORTED_TARGETS}) + add_dependencies(tests_object detection.bag) +endif() + +add_dependencies(${PROJECT_NAME}_viewer ${PROJECT_NAME}) +add_dependencies(${PROJECT_NAME}_viewer ${catkin_EXPORTED_TARGETS}) + + +# Sample1: Localization and Tracking +add_library(${PROJECT_NAME}_sample1 src/sample-localize-track/realsense_ormgr_nodelet.cpp) + +target_link_libraries(${PROJECT_NAME}_sample1 + ${catkin_LIBRARIES} +) +add_dependencies(${PROJECT_NAME}_sample1 ${PROJECT_NAME}) +add_dependencies(${PROJECT_NAME}_sample1 ${catkin_EXPORTED_TARGETS}) + + +# Install nodelet library +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +) + +# Install header files +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +) + +# Install launch files +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch +) + +# Install xml files +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) diff --git a/realsense_ros_object/README.md b/realsense_ros_object/README.md new file mode 100644 index 0000000..506ec45 --- /dev/null +++ b/realsense_ros_object/README.md @@ -0,0 +1,133 @@ +ROS Node for Intel® RealSense™ Object Library +========================================= +This package contains a ROS wrapper for Intel's realsense library. The realsense_object package provides a solution to recognize, localize and track objects as a ROS nodelet. + +## Nodelets + +### realsense_ros_object_localization +The realsense_localization nodelet takes color image and depth image from device, then processes localization on the frame, and publishes the result as realsense_or_msgs::ObjectsInBoxes message. 
+ +ObjectsInBoxes contains the color image header and an array of ObjectInBox elements, each of which contains an object (class name and probability), rect and 3d location. + +#### Subscribed Topics + camera/color/camera_info (sensor_msgs/CameraInfo) + The intrinsic of color camera + camera/depth/camera_info (sensor_msgs/CameraInfo) + The intrinsic (and extrinsic) of camera( depth and depth to color) + camera/color/image_raw (sensor_msgs::Image) + color image from camera + camera/depth/image_raw (sensor_msgs::Image) + depth image from camera + +#### Published Topics + realsense_or_msgs::ObjectsInBoxes + color image header and an array of ObjectInBox elements, each of which contains object name, rect and 3d location. + realsense_or_msgs::cpu_gpu + error message when unable to use GPU, and use CPU instead + +#### Parameters + ~show_rgb: (boolean, default: false) + if true - view localization result, drawn on rgb frame + ~confidence: (float, default: 0.7) + confidence threshold of localization results. results with confidence lower than this threshold will not be published. + ~estimate_center: (boolean, default: true) + if true - object center estimation will be enabled, and the 3d location field in ObjectInBox will store it. + ~use_CPU: (boolean, default: true) + if true - use CPU when processing localization. otherwise - use GPU opencl. + +### realsense_ros_object_recognition +The realsense_recognition nodelet takes color image and depth image from device, then processes recognition on the frame, and publishes the result as realsense_or_msgs::ObjectArray message. 
+ +ObjectArray is an array of objects - class name and probability + +#### Subscribed Topics + camera/color/camera_info (sensor_msgs/CameraInfo) + The intrinsic of color camera + camera/depth/camera_info (sensor_msgs/CameraInfo) + The intrinsic (and extrinsic) of camera( depth and depth to color) + camera/color/image_raw (sensor_msgs::Image) + color image from camera + camera/depth/image_raw (sensor_msgs::Image) + depth image from camera + +#### Published Topics + realsense_or_msgs::ObjectArray + array of objects - class name and probability + +#### Command Line Parameters + ~show_rgb to show recognition result with rgb frame + +### realsense_ros_object_tracking +The realsense_tracking nodelet takes color image and depth image from device, gets ROIs of objects to track, and then processes tracking on the frame, and publishes the result as realsense_or_msgs::TrackedObjectsArray message. + +TrackedObjectsArray contains the color image header and an array of TrackedObject elements, each of which contains object index, rect and 3d location. + +#### Subscribed Topics + camera/color/camera_info (sensor_msgs/CameraInfo) + The intrinsic of color camera + camera/depth/camera_info (sensor_msgs/CameraInfo) + The intrinsic (and extrinsic) of camera( depth and depth to color) + camera/color/image_raw (sensor_msgs::Image) + color image from camera + camera/depth/image_raw (sensor_msgs::Image) + depth image from camera + realsense/localized_objects (realsense_or_msgs::ObjectsInBoxes) + localization result. for tracking + localization, when localization result is used as tracking ROIs input. + realsense/objects_to_track (realsense_or_msgs::TrackedObjectsArray) + tracking ROIs input. for tracking only. + +#### Published Topics + realsense_or_msgs::TrackedObjectsArray + color image header and array of TrackedObject, which contains object index, rect and 3d location. 
+ +#### Parameters + ~show_rgb: (boolean, default: false) + if true - view localization result, drawn on rgb frame + ~estimate_center: (boolean, default: true) + if true - object center estimation will be enabled, and the 3d location field in ObjectInBox will store it. + ~use_CPU: (boolean, default: true) + if true - use CPU when processing tracking. otherwise - use GPU opencl. + ~max_number_of_objects: (int, default: 5) + number of objects to track. if the input roi array size is higher - tracking will be processed only on the first max_number_of_objects ROIs + +## Usage + +Object package consists of 3 nodelets: realsense_ros_object_localization, realsense_ros_object_recognition, and realsense_ros_object_tracking. + +The user should create their own combination of those 3 nodelets by creating a new nodelet that associates with them. + + +### For sample app + +#### From camera +```bash +$ roslaunch realsense_ros_object demo_object.launch +``` + +#### From ros bag file + +Recording a bag file: +```bash +$ roslaunch realsense_ros_object record_bag_object.launch bag_path:=mypath +``` +Running object demo from bag: +```bash +$ roslaunch realsense_ros_object demo_object_from_bag.launch bag_path:=mypath +``` + +The sample app demonstrates a combination of localization and tracking: it processes localization when the user hits the space key, and the ROIs of the localization output are the input to tracking. + +## Testing + +The object package can be tested with pre-recorded data using the provided ROS unit test. No physical camera needs to be present in order to run the test. The following steps can be used to build the unit test and download the pre-recorded ROS .bag data: + +```bash +$ cd ~/catkin_ws +$ catkin_make -DREALSENSE_ENABLE_TESTING=On +$ rostest realsense_ros_object object.test +``` + +You will see the test execute with the graphics display recognizing known objects, and the test passes with a "RESULT: SUCCESS" status. 
+ +**Note:** Depending on your internet connection speed, enabling 'REALSENSE_ENABLE_TESTING' can cause catkin_make to run for very long time (more than 5 minutes), as it downloads required pre-recorded .bag data. + diff --git a/realsense_ros_object/launch/demo_object.launch b/realsense_ros_object/launch/demo_object.launch new file mode 100644 index 0000000..d324f6b --- /dev/null +++ b/realsense_ros_object/launch/demo_object.launch @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/realsense_ros_object/launch/demo_object_from_bag.launch b/realsense_ros_object/launch/demo_object_from_bag.launch new file mode 100644 index 0000000..0465776 --- /dev/null +++ b/realsense_ros_object/launch/demo_object_from_bag.launch @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/realsense_ros_object/launch/object.launch b/realsense_ros_object/launch/object.launch new file mode 100644 index 0000000..46461e5 --- /dev/null +++ b/realsense_ros_object/launch/object.launch @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_object/launch/record_bag_object.launch b/realsense_ros_object/launch/record_bag_object.launch new file mode 100644 index 0000000..0ed2aa1 --- /dev/null +++ b/realsense_ros_object/launch/record_bag_object.launch @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_object/msg/Frame.msg b/realsense_ros_object/msg/Frame.msg new file mode 100644 index 0000000..aaa019a --- /dev/null +++ b/realsense_ros_object/msg/Frame.msg @@ -0,0 +1,2 @@ +int32 numberOfUsers +User[] usersData diff --git a/realsense_ros_object/msg/Gestures.msg b/realsense_ros_object/msg/Gestures.msg new file mode 100644 index 0000000..e282c73 --- /dev/null +++ b/realsense_ros_object/msg/Gestures.msg @@ -0,0 +1 @@ +Pointing pointing diff --git a/realsense_ros_object/msg/Landmark.msg b/realsense_ros_object/msg/Landmark.msg new file mode 100644 index 0000000..2c68aad --- /dev/null +++ b/realsense_ros_object/msg/Landmark.msg @@ -0,0 +1,4 @@ 
+int32 type +float32 confidenceLevel +geometry_msgs/Point32 location +geometry_msgs/Point32 realWorldCoordinates diff --git a/realsense_ros_object/msg/Location3D.msg b/realsense_ros_object/msg/Location3D.msg new file mode 100644 index 0000000..d261a0e --- /dev/null +++ b/realsense_ros_object/msg/Location3D.msg @@ -0,0 +1,3 @@ +geometry_msgs/Point32 coordinates +float32 horiz_margin +float32 vert_margin diff --git a/realsense_ros_object/msg/Object.msg b/realsense_ros_object/msg/Object.msg new file mode 100644 index 0000000..6dd957b --- /dev/null +++ b/realsense_ros_object/msg/Object.msg @@ -0,0 +1,3 @@ +int32 label +string object_name +float32 probability diff --git a/realsense_ros_object/msg/ObjectArray.msg b/realsense_ros_object/msg/ObjectArray.msg new file mode 100644 index 0000000..83c3474 --- /dev/null +++ b/realsense_ros_object/msg/ObjectArray.msg @@ -0,0 +1 @@ +Object[] objects_vector diff --git a/realsense_ros_object/msg/ObjectInBox.msg b/realsense_ros_object/msg/ObjectInBox.msg new file mode 100644 index 0000000..8aec574 --- /dev/null +++ b/realsense_ros_object/msg/ObjectInBox.msg @@ -0,0 +1,3 @@ +Object object +Rect object_bbox +Location3D location diff --git a/realsense_ros_object/msg/ObjectsInBoxes.msg b/realsense_ros_object/msg/ObjectsInBoxes.msg new file mode 100644 index 0000000..e69180b --- /dev/null +++ b/realsense_ros_object/msg/ObjectsInBoxes.msg @@ -0,0 +1,2 @@ +std_msgs/Header header +ObjectInBox[] objects_vector \ No newline at end of file diff --git a/realsense_ros_object/msg/Pointing.msg b/realsense_ros_object/msg/Pointing.msg new file mode 100644 index 0000000..4211289 --- /dev/null +++ b/realsense_ros_object/msg/Pointing.msg @@ -0,0 +1,5 @@ +int32 confidence +geometry_msgs/Point32 originColor +geometry_msgs/Point32 originWorld +geometry_msgs/Point32 orientationColor +geometry_msgs/Vector3 orientationWorld diff --git a/realsense_ros_object/msg/Rect.msg b/realsense_ros_object/msg/Rect.msg new file mode 100644 index 0000000..f81d0ba --- 
/dev/null +++ b/realsense_ros_object/msg/Rect.msg @@ -0,0 +1,4 @@ +int32 x +int32 y +int32 height +int32 width diff --git a/realsense_ros_object/msg/TrackedObject.msg b/realsense_ros_object/msg/TrackedObject.msg new file mode 100644 index 0000000..3afee69 --- /dev/null +++ b/realsense_ros_object/msg/TrackedObject.msg @@ -0,0 +1,3 @@ +Rect bbox +int32 id +Location3D location diff --git a/realsense_ros_object/msg/TrackedObjectsArray.msg b/realsense_ros_object/msg/TrackedObjectsArray.msg new file mode 100644 index 0000000..308e89f --- /dev/null +++ b/realsense_ros_object/msg/TrackedObjectsArray.msg @@ -0,0 +1,2 @@ +std_msgs/Header header +TrackedObject[] tracked_objects_vector diff --git a/realsense_ros_object/msg/UI.msg b/realsense_ros_object/msg/UI.msg new file mode 100644 index 0000000..de3011b --- /dev/null +++ b/realsense_ros_object/msg/UI.msg @@ -0,0 +1 @@ +int32 key diff --git a/realsense_ros_object/msg/User.msg b/realsense_ros_object/msg/User.msg new file mode 100644 index 0000000..e6b136f --- /dev/null +++ b/realsense_ros_object/msg/User.msg @@ -0,0 +1,11 @@ +UserInfo userInfo +geometry_msgs/Point32 centerOfMassImage +geometry_msgs/Point32 centerOfMassWorld +geometry_msgs/Point32[2] userRectCorners +geometry_msgs/Point32[2] userFaceRectCorners +sensor_msgs/Image userBlobImage +sensor_msgs/Image userSegmentationImage +sensor_msgs/Image geodesicMatrix +Landmark[] landmarks +Gestures gestures + diff --git a/realsense_ros_object/msg/UserInfo.msg b/realsense_ros_object/msg/UserInfo.msg new file mode 100644 index 0000000..bcf8ed4 --- /dev/null +++ b/realsense_ros_object/msg/UserInfo.msg @@ -0,0 +1,2 @@ +int32 Id +int32 recognitionId \ No newline at end of file diff --git a/realsense_ros_object/msg/cpu_gpu.msg b/realsense_ros_object/msg/cpu_gpu.msg new file mode 100644 index 0000000..1f542f5 --- /dev/null +++ b/realsense_ros_object/msg/cpu_gpu.msg @@ -0,0 +1 @@ +string CPU_GPU diff --git a/realsense_ros_object/nodelet_plugins.xml 
b/realsense_ros_object/nodelet_plugins.xml new file mode 100755 index 0000000..1f4ce2d --- /dev/null +++ b/realsense_ros_object/nodelet_plugins.xml @@ -0,0 +1,31 @@ + + + + Intel(R) RealSense(TM) Object tracking nodelet. + + + + + Intel(R) RealSense(TM) Object recognize nodelet. + + + + + Intel(R) RealSense(TM) Object localize nodelet. + + + + + + + Intel(R) RealSense(TM) Object viewer nodelet. + + + + + + + Intel(R) RealSense(TM) Object Sample Localization/Tracker nodelet + + + diff --git a/realsense_ros_object/package.xml b/realsense_ros_object/package.xml new file mode 100755 index 0000000..2f896ad --- /dev/null +++ b/realsense_ros_object/package.xml @@ -0,0 +1,32 @@ + + + realsense_ros_object + 0.8.2 + The realsense_ros_object package + Intel RealSense + Apache 2.0 + catkin + roscpp + rospy + std_msgs + genmsg + sensor_msgs + geometry_msgs + message_generation + nodelet + realsense_ros_camera + roscpp + rospy + std_msgs + genmsg + sensor_msgs + geometry_msgs + message_runtime + nodelet + message_generation + message_runtime + realsense_ros_camera + + + + diff --git a/realsense_ros_object/src/realsense_localization_nodelet.cpp b/realsense_ros_object/src/realsense_localization_nodelet.cpp new file mode 100644 index 0000000..9c52589 --- /dev/null +++ b/realsense_ros_object/src/realsense_localization_nodelet.cpp @@ -0,0 +1,342 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#include "ros/ros.h" + + +#include "realsense_ros_object/Rect.h" +#include "realsense_ros_object/ObjectInBox.h" +#include "realsense_ros_object/ObjectsInBoxes.h" +#include "realsense_ros_object/cpu_gpu.h" + +#include +#include +#include +#include + + +#include "realsense_localization_nodelet.h" + +#include + +#include + +#ifdef RUN_AS_NODELET +PLUGINLIB_EXPORT_CLASS(realsense_ros_object::CLocalizationNodelet, nodelet::Nodelet) +#endif + + +namespace realsense_ros_object +{ + + +//****************************** +// Public Methods +//****************************** + +CLocalization::CLocalization() +{ + initialize(); +} + +CLocalization::~CLocalization() +{ + unInitialize(); + if (m_show_rgb) + { + cv::destroyAllWindows(); + } +} +int CLocalization::unInitialize() +{ + + return 0; +} + +int CLocalization::initialize() +{ + m_show_rgb = false; + m_confidence = 0.7; + m_estimateCenter = true; + m_use_CPU = true; + return 0; +} + +void CLocalization::getParams(ros::NodeHandle& nh) +{ + nh.getParam("show_rgb", m_show_rgb); + nh.getParam("confidence", m_confidence); + nh.getParam("estimate_center", m_estimateCenter); + nh.getParam("use_CPU", m_use_CPU); +} + +void CLocalization::getParams(const std::vector & argv) +{ + + for (int i = 0; i < argv.size(); i++) + { + if (argv[i] == "show_rgb") + { + m_show_rgb = true; + } + else if (argv[i] == "-confidence") + { + m_confidence = atof(argv[++i].c_str()); + } + else if (argv[i] == "-estimate_center") + { + m_estimateCenter = true; + } + } + +} + +void CLocalization::init(ros::NodeHandle& nh) +{ + m_or_data = nullptr; + m_or_configuration = nullptr; + + m_no_subscribers = true; + m_nh = nh; + + image_transport::ImageTransport it(nh); + + m_sub_depthCameraInfo = nh.subscribe("camera/depth/camera_info", 1, &CLocalization::depthCameraInfoCallback, this); + m_recognized_objects_pub = nh.advertise("realsense/localized_objects", 1); + m_cpu_gpu_pub = nh.advertise("realsense/cpu_gpu", 1); + if (m_show_rgb) + { + 
cv::namedWindow("localization"); + } +} + + +void CLocalization::depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + // this callback is used in trackMiddle mode. + // get the image size and set the bbox in the middle of the image + m_depthHeight = cameraInfo->height; + m_depthWidth = cameraInfo->width; + + ROS_INFO("got depth info"); + utils.get_extrinsics(cameraInfo, m_ext); + utils.get_intrinsics(cameraInfo, m_depthInt); + + m_sub_colorCameraInfo = m_nh.subscribe("camera/color/camera_info", 1, &CLocalization::colorCameraInfoCallback, this); + m_sub_depthCameraInfo.shutdown(); + +} + +void CLocalization::colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + m_sub_colorCameraInfo.shutdown(); + ROS_INFO("got color info"); + utils.get_intrinsics(cameraInfo, m_colorInt); + // get the image size and set the bbox in the middle of the image + m_colorHeight = cameraInfo->height; + m_colorWidth = cameraInfo->width; + rs::core::status st = utils.init_or(m_colorInt, m_depthInt, m_ext, m_impl, &m_or_data, &m_or_configuration); + if (st != rs::core::status_no_error) + { + ROS_ERROR("Unable to init Object Recgonition!"); + exit(0); + } + + + m_colorInfo.height = m_colorHeight; + m_colorInfo.width = m_colorWidth; + m_colorInfo.format = rs::core::pixel_format::rgb8; + m_colorInfo.pitch = m_colorInfo.width * 3; + + + m_depthInfo.height = m_depthHeight; + m_depthInfo.width = m_depthWidth; + m_depthInfo.format = rs::core::pixel_format::z16; + m_depthInfo.pitch = m_depthInfo.width * 2; + + //change mode to localization + m_or_configuration->set_recognition_mode(rs::object_recognition::recognition_mode::LOCALIZATION); + //set the localization mechnizm to use CNN + m_or_configuration->set_localization_mechanism(rs::object_recognition::localization_mechanism::CNN); + //ignore all objects under 0.7 probabilty (confidence) + m_or_configuration->set_recognition_confidence(m_confidence); + //enable object center feature + 
m_or_configuration->enable_object_center_estimation(m_estimateCenter); + if (false == m_use_CPU) + { + //enable GPU computing + m_or_configuration->set_compute_engine(rs::object_recognition::compute_engine::GPU); + ROS_INFO("using GPU - opencl driver"); + } + else + { + ROS_INFO("using CPU"); + } + try + { + m_or_configuration->apply_changes(); + ROS_INFO("init done"); + } + catch (...) + { + ROS_INFO("Unable to use GPU, using CPU"); + m_or_configuration->set_compute_engine(rs::object_recognition::compute_engine::CPU); + m_or_configuration->apply_changes(); + realsense_ros_object::cpu_gpu msg; + msg.CPU_GPU = "Unable to use GPU, using CPU"; + m_cpu_gpu_pub.publish(msg); + } + + // Subscribe to color, depth using synchronization filter + mDepthSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/color/image_raw", 1)); + mColorSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/depth/image_raw", 1)); + + mTimeSynchronizer = std::unique_ptr> + (new message_filters::TimeSynchronizer(*mDepthSubscriber, *mColorSubscriber, 60)); + + mTimeSynchronizer->registerCallback(boost::bind(&CLocalization::LocalizationCallback, this, _1, _2)); + +} + + +void CLocalization::LocalizationCallback(const sensor_msgs::ImageConstPtr& color, const sensor_msgs::ImageConstPtr& depth) +{ + // if there are no subscribers do nothing + + if (m_recognized_objects_pub.getNumSubscribers() <= 0) + { + return; + } + + + ROS_INFO("working on localization"); + + try + { + m_imageColor = cv_bridge::toCvShare(color, "rgb8")->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("Could not convert from '%s' to 'rgb8'.", color->encoding.c_str()); + return; + } + + try + { + m_imageDepth = cv_bridge::toCvShare(depth, sensor_msgs::image_encodings::TYPE_16UC1)->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("toCvShare failed"); + return; + } + + int array_size = 0; + rs::core::status st; + rs::core::correlated_sample_set* sample_set = 
utils.get_sample_set(m_colorInfo, m_depthInfo, m_imageColor.data, m_imageDepth.data, color->header.seq, (uint64_t)color->header.stamp.nsec); + st = m_impl.process_sample_set(*sample_set); + if (st != rs::core::status_no_error) + { + ROS_ERROR("process_sample_set_sync failed"); + return; + } + rs::object_recognition::localization_data* localization_data = nullptr; + + //retrieve localization data from the or_data object + st = m_or_data->query_localization_result(&localization_data, array_size); + if (st != rs::core::status_no_error) + { + ROS_ERROR("query_localization_result failed"); + return; + } + + //ROS_INFO(("array" + std::to_string(array_size)).c_str() ); + realsense_ros_object::ObjectsInBoxes objects_with_pos; + realsense_ros_object::ObjectInBox objectInBox; + for (int i = 0; i < array_size; i++) + { + objectInBox.object.label = localization_data[i].label; + objectInBox.object.object_name = m_or_configuration->query_object_name_by_id(localization_data[i].label); + objectInBox.object.probability = localization_data[i].probability; + objectInBox.object_bbox.x = localization_data[i].roi.x; + objectInBox.object_bbox.y = localization_data[i].roi.y; + objectInBox.object_bbox.width = localization_data[i].roi.width; + objectInBox.object_bbox.height = localization_data[i].roi.height; + objectInBox.location.coordinates.x = localization_data[i].object_center.coordinates.x; + objectInBox.location.coordinates.y = localization_data[i].object_center.coordinates.y; + objectInBox.location.coordinates.z = localization_data[i].object_center.coordinates.z; + objectInBox.location.horiz_margin = localization_data[i].object_center.horiz_margin; + objectInBox.location.vert_margin = localization_data[i].object_center.vert_margin; + + objects_with_pos.objects_vector.push_back(objectInBox); + + } + + // copy the header for image and localization sync; + objects_with_pos.header = color->header; + + m_recognized_objects_pub.publish(objects_with_pos); + ROS_INFO("localization done"); + 
if (m_show_rgb) + { + draw_results(localization_data, array_size, m_or_configuration); + cv::imshow("localization", m_imageColor); + cv::waitKey(1); + } + +} + + +void CLocalization::draw_results(rs::object_recognition::localization_data* localization_data, int array_size, rs::object_recognition::or_configuration_interface* or_configuration) +{ + for (int i = 0; i < array_size; i++) + { + std::stringstream text; + std::string object_name = or_configuration->query_object_name_by_id(localization_data[i].label); + text.precision(2); + text << std::fixed << object_name << ": " << localization_data[i].probability; + if (or_configuration->is_object_center_estimation_enabled()) + { + text.precision(0); + text << " COM: (" << localization_data[i].object_center.coordinates.x << ", " << localization_data[i].object_center.coordinates.y << ", " << localization_data[i].object_center.coordinates.z << ")" ; + } //todo: print margin vertical and horizental + cv::Point txtLocation(localization_data[i].roi.x - 5, std::max(localization_data[i].roi.y, 20)); + cv::putText(m_imageColor, text.str(), txtLocation, cv::FONT_HERSHEY_PLAIN, 1.5, cvScalar(0, 0, 0), 2); // black text outlining + cv::putText(m_imageColor, text.str(), txtLocation, cv::FONT_HERSHEY_PLAIN, 1.5, cvScalar(255, 128, 128), 1); + cv::Rect roiRect(localization_data[i].roi.x, localization_data[i].roi.y, localization_data[i].roi.width, localization_data[i].roi.height); + cv::rectangle(m_imageColor, roiRect, cvScalar(255, 0, 0)); + } +} + +#ifdef RUN_AS_NODELET + +//****************************** +// Public Methods +//****************************** + +CLocalizationNodelet::~CLocalizationNodelet() +{ +} + +void CLocalizationNodelet::onInit() +{ + ROS_INFO_STREAM("Object Localization OnInit"); + NODELET_INFO("Object Localization OnInit."); + + ros::NodeHandle& local_nh = getPrivateNodeHandle(); + m_RecgonitionNodelet.getParams(local_nh); + + ros::NodeHandle& nh = getNodeHandle(); + m_RecgonitionNodelet.init(nh); + + + 
NODELET_INFO("Objects Localization nodelet initialized."); +} + +#endif + +} + + diff --git a/realsense_ros_object/src/realsense_localization_nodelet.h b/realsense_ros_object/src/realsense_localization_nodelet.h new file mode 100644 index 0000000..a2ebf10 --- /dev/null +++ b/realsense_ros_object/src/realsense_localization_nodelet.h @@ -0,0 +1,134 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#pragma once +# ifndef RS_LOCALIZATION_NODELET +# define RS_LOCALIZATION_NODELET + +/////////////////////////////////////////////// +/// Dependencies +/////////////////////////////////////////////// + +#include + +#include +#include + +#include +#include "utils.h" + + +namespace realsense_ros_object +{ +/////////////////////////////////////////////// +/// CLocalization - +/////////////////////////////////////////////// +class CLocalization +{ +public : + //=================================== + // Interface + //=================================== + void getParams(ros::NodeHandle& nh); + void getParams(const std::vector & argv); + void init(ros::NodeHandle& nh); + + CLocalization(); + virtual ~CLocalization(); + +private: + //=================================== + // Member Functions + //=================================== + + int initialize(); + int unInitialize(); + + //Static member functions: + void colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void draw_results(rs::object_recognition::localization_data* localization_data, int array_size, rs::object_recognition::or_configuration_interface* or_configuration); + + void LocalizationCallback(const sensor_msgs::ImageConstPtr& color , const sensor_msgs::ImageConstPtr& depth); + + + //=================================== + // Member Variables + //=================================== + + image_transport::Subscriber m_sub_color; + 
image_transport::Subscriber m_sub_depth; + ros::Subscriber m_sub_objects_with_pos; + ros::Subscriber m_sub_colorCameraInfo; + ros::Subscriber m_sub_depthCameraInfo; + + std::unique_ptr> mDepthSubscriber; + std::unique_ptr> mColorSubscriber; + std::unique_ptr> mTimeSynchronizer; + + ros::Publisher m_recognized_objects_pub; + ros::Publisher m_cpu_gpu_pub; + + cv::Mat m_imageColor; + cv::Mat m_imageDepth; + + + bool m_show_rgb; + bool m_estimateCenter; + float m_confidence; + bool m_use_CPU; + bool m_no_subscribers; + + ros::NodeHandle m_nh; + + int m_colorHeight; + int m_colorWidth; + int m_depthHeight; + int m_depthWidth; + + rs::extrinsics m_ext; + rs::intrinsics m_colorInt; + rs::intrinsics m_depthInt; + + ros_camera_utils utils; + + rs::core::image_info m_colorInfo; + rs::core::image_info m_depthInfo; + + rs::object_recognition::or_video_module_impl m_impl; + rs::object_recognition::or_data_interface* m_or_data; + rs::object_recognition::or_configuration_interface* m_or_configuration; +}; + +#ifdef RUN_AS_NODELET +/////////////////////////////////////////////// +/// CLocalizationNodelet +/////////////////////////////////////////////// +class CLocalizationNodelet : public nodelet::Nodelet +{ +public : + //=================================== + // Interface + //=================================== + virtual void onInit(); + + ~CLocalizationNodelet(); + +private: + //=================================== + // Member Functions + //=================================== + + //=================================== + // Member Variables + //=================================== + + CLocalization m_RecgonitionNodelet; + +}; +#endif +}; + + +#endif // RS_RECOGNITION_NODELET + diff --git a/realsense_ros_object/src/realsense_recognition_nodelet.cpp b/realsense_ros_object/src/realsense_recognition_nodelet.cpp new file mode 100644 index 0000000..bc549f1 --- /dev/null +++ b/realsense_ros_object/src/realsense_recognition_nodelet.cpp @@ -0,0 +1,243 @@ +// License: Apache 2.0. 
See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#include +#include + +#include + +#include "ros/ros.h" +#include "std_msgs/String.h" + +#include "realsense_ros_object/ObjectArray.h" + +#include +#include +#include +#include + + +#include "realsense_recognition_nodelet.h" + +#include + + + + + +#include + + +#ifdef RUN_AS_NODELET +PLUGINLIB_EXPORT_CLASS(realsense_ros_object::CRecognitionNodelet, nodelet::Nodelet) +#endif + + +namespace realsense_ros_object +{ + + +//****************************** +// Public Methods +//****************************** + +CRecognition::~CRecognition() +{ + unInitialize(); + if (m_show_rgb) + { + cv::destroyAllWindows(); + } +} +int CRecognition::unInitialize() +{ + + return 0; +} + +int CRecognition::initialize() +{ + return 0; +} + +void CRecognition::init(ros::NodeHandle& nh, const std::vector & argv) +{ + + m_or_data = nullptr; + m_or_configuration = nullptr; + m_no_subscribers = true; + m_show_rgb = false; + m_nh = nh; + + for (int i = 0; i < argv.size(); i++) + { + if (argv[i] == "show_rgb") + { + m_show_rgb = true; + } + } + + image_transport::ImageTransport it(nh); + + m_sub_depthCameraInfo = nh.subscribe("camera/depth/camera_info", 1, &CRecognition::depthCameraInfoCallback, this); + m_recognized_objects_pub = nh.advertise("realsense/recognized_objects", 1); + + + + if (m_show_rgb) + { + cv::namedWindow("viewColor"); + } +} + +void CRecognition::depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + // this callback is used in trackMiddle mode. 
+ // get the image size and set the bbox in the middle of the image + m_depthHeight = cameraInfo->height; + m_depthWidth = cameraInfo->width; + + + utils.get_extrinsics(cameraInfo, m_ext); + utils.get_intrinsics(cameraInfo, m_depthInt); + + m_sub_colorCameraInfo = m_nh.subscribe("camera/color/camera_info", 1, &CRecognition::colorCameraInfoCallback, this); + m_sub_depthCameraInfo.shutdown(); +} + +void CRecognition::colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + + utils.get_intrinsics(cameraInfo, m_colorInt); + // get the image size and set the bbox in the middle of the image + m_colorHeight = cameraInfo->height; + m_colorWidth = cameraInfo->width; + + image_transport::ImageTransport it(m_nh); +// m_sub_color = it.subscribe("camera/color/image_raw", 1, &CRecognition::imageColorCallback, this); + m_sub_colorCameraInfo.shutdown(); + + if (utils.init_or(m_colorInt, m_depthInt, m_ext, m_impl, &m_or_data, &m_or_configuration) != rs::core::status_no_error) + { + ROS_ERROR("Unable to init Object Recgonition!"); + exit(0); + } + + + m_colorInfo.height = m_colorHeight; + m_colorInfo.width = m_colorWidth; + m_colorInfo.format = rs::core::pixel_format::bgr8; + m_colorInfo.pitch = m_colorInfo.width * 3; + + + m_depthInfo.height = m_depthHeight; + m_depthInfo.width = m_depthWidth; + m_depthInfo.format = rs::core::pixel_format::z16; + m_depthInfo.pitch = m_depthInfo.width * 2; + + //add bounding box to recognition + m_or_configuration->set_roi(rs::core::rect{(int)(m_colorInfo.width / 4), (int)(m_colorInfo.height / 4), (int)(m_colorInfo.width / 2), (int)(m_colorInfo.height / 2)}); + m_or_configuration->apply_changes(); + + + // Subscribe to color, depth using synchronization filter + mDepthSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/color/image_raw", 1)); + mColorSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/depth/image_raw", 1)); + + mTimeSynchronizer = std::unique_ptr> + (new 
message_filters::TimeSynchronizer(*mDepthSubscriber, *mColorSubscriber, 40)); + + mTimeSynchronizer->registerCallback(boost::bind(&CRecognition::RecognitionCallback, this, _1, _2)); + +} + + +void CRecognition::RecognitionCallback(const sensor_msgs::ImageConstPtr& color, const sensor_msgs::ImageConstPtr& depth) +{ + try + { + m_imageColor = cv_bridge::toCvShare(color, "bgr8")->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("Could not convert from '%s' to 'bgr8'.", color->encoding.c_str()); + return; + } + + try + { + m_imageDepth = cv_bridge::toCvShare(depth, sensor_msgs::image_encodings::TYPE_16UC1)->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("toCvShare failed"); + return; + } + + int array_size = 0; + rs::core::status st; + + rs::core::correlated_sample_set* sample_set = utils.get_sample_set(m_colorInfo, m_depthInfo, m_imageColor.data, m_imageDepth.data, color->header.seq, (uint64_t)color->header.stamp.nsec); + + st = m_impl.process_sample_set(*sample_set); + if (st != rs::core::status_no_error) + { + ROS_ERROR("process_sample_set_sync failed"); + return; + } + rs::object_recognition::recognition_data* recognition_data = nullptr; + + //retrieve recognition data from the or_data object + st = m_or_data->query_single_recognition_result(&recognition_data, array_size); + if (st != rs::core::status_no_error) + { + ROS_ERROR("query_single_recognition_result failed"); + return; + } + + realsense_ros_object::ObjectArray object_vector; + realsense_ros_object::Object one_object; + for (int i = 0; i < array_size; i++) + { + + one_object.object_name = m_or_configuration->query_object_name_by_id(recognition_data[i].label); + one_object.probability = recognition_data[i].probability; + one_object.label = recognition_data[i].label; + + object_vector.objects_vector.push_back(one_object); + } + + m_recognized_objects_pub.publish(object_vector); + + +} + +#ifdef RUN_AS_NODELET + +//****************************** +// Public Methods 
+//****************************** + +CRecognitionNodelet::~CRecognitionNodelet() +{ + +} + +void CRecognitionNodelet::onInit() +{ + ROS_INFO_STREAM("Object Recognition OnInit"); + NODELET_INFO("Object Recognition OnInit."); + + const std::vector & argv = getMyArgv(); + ros::NodeHandle& nh = getPrivateNodeHandle();// getNodeHandle(); + m_RecgonitionNodelet.init(nh, argv); + + NODELET_INFO("Objects Tracker nodelet initialized."); +} + +#endif + +} + diff --git a/realsense_ros_object/src/realsense_recognition_nodelet.h b/realsense_ros_object/src/realsense_recognition_nodelet.h new file mode 100644 index 0000000..f0f052b --- /dev/null +++ b/realsense_ros_object/src/realsense_recognition_nodelet.h @@ -0,0 +1,128 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#pragma once +# ifndef RS_RECOGNITION_NODELET +# define RS_RECOGNITION_NODELET + +/////////////////////////////////////////////// +/// Dependencies +/////////////////////////////////////////////// + +#include + +#include +#include + +#include +#include "utils.h" + + +namespace realsense_ros_object +{ +/////////////////////////////////////////////// +/// CRecognition - +/////////////////////////////////////////////// +class CRecognition +{ +public : + //=================================== + // Interface + //=================================== + void init(ros::NodeHandle& nh, const std::vector & argv); + + ~CRecognition(); + +private: + //=================================== + // Member Functions + //=================================== + + int initialize(); + int unInitialize(); + + //Static member functions: + void colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void draw_results(rs::object_recognition::recognition_data* recognition_data, int array_size, rs::object_recognition::or_configuration_interface* 
or_configuration); + + void RecognitionCallback(const sensor_msgs::ImageConstPtr& color , const sensor_msgs::ImageConstPtr& depth); + + //=================================== + // Member Variables + //=================================== + + image_transport::Subscriber m_sub_color; + image_transport::Subscriber m_sub_depth; + ros::Subscriber m_sub_objects_with_pos; + ros::Subscriber m_sub_colorCameraInfo; + ros::Subscriber m_sub_depthCameraInfo; + + std::unique_ptr> mDepthSubscriber; + std::unique_ptr> mColorSubscriber; + + std::unique_ptr> mTimeSynchronizer; + + + ros::Publisher m_recognized_objects_pub; + + cv::Mat m_imageColor; + cv::Mat m_imageDepth; + + bool m_show_rgb; + bool m_no_subscribers; + + ros::NodeHandle m_nh; + std::string m_colorInfoTopic; + + int m_colorHeight; + int m_colorWidth; + int m_depthHeight; + int m_depthWidth; + + rs::extrinsics m_ext; + rs::intrinsics m_colorInt; + rs::intrinsics m_depthInt; + + ros_camera_utils utils; + + rs::core::image_info m_colorInfo; + rs::core::image_info m_depthInfo; + + rs::object_recognition::or_video_module_impl m_impl; + rs::object_recognition::or_data_interface* m_or_data; + rs::object_recognition::or_configuration_interface* m_or_configuration; +}; + +#ifdef RUN_AS_NODELET +/////////////////////////////////////////////// +/// CRecognitionNodelet +/////////////////////////////////////////////// +class CRecognitionNodelet : public nodelet::Nodelet +{ +public : + //=================================== + // Interface + //=================================== + virtual void onInit(); + + ~CRecognitionNodelet(); + +private: + //=================================== + // Member Functions + //=================================== + + //=================================== + // Member Variables + //=================================== + + CRecognition m_RecgonitionNodelet; + +}; +#endif +}; + + +#endif // RS_RECOGNITION_NODELET + diff --git a/realsense_ros_object/src/realsense_tracking_nodelet.cpp 
b/realsense_ros_object/src/realsense_tracking_nodelet.cpp new file mode 100644 index 0000000..5dbdb66 --- /dev/null +++ b/realsense_ros_object/src/realsense_tracking_nodelet.cpp @@ -0,0 +1,327 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#include + +#include "realsense_ros_object/TrackedObject.h" +#include "realsense_ros_object/TrackedObjectsArray.h" +#include "realsense_ros_object/ObjectInBox.h" +#include "realsense_ros_object/ObjectsInBoxes.h" + +#include +#include +#include +#include + +#include "realsense_tracking_nodelet.h" + +#include + +#ifdef RUN_AS_NODELET +PLUGINLIB_EXPORT_CLASS(realsense_ros_object::CTrackingNodelet, nodelet::Nodelet) +#endif + + +namespace realsense_ros_object +{ + + +//****************************** +// Public Methods +//****************************** + +CTracking::CTracking() +{ + initialize(); +} + + +CTracking::~CTracking() +{ + unInitialize(); + if (m_show_rgb) + { + cv::destroyAllWindows(); + } +} +int CTracking::unInitialize() +{ + + return 0; +} + +int CTracking::initialize() +{ + m_localization_msg = false; + m_estimateCenter = true; + m_maxNumberOfObjects = 5; + return 0; +} + +void CTracking::getParams(ros::NodeHandle& nh) +{ + + nh.getParam("show_rgb", m_show_rgb); + nh.getParam("subscribe_to_localization", m_localization_msg); + nh.getParam("estimate_center", m_estimateCenter); + nh.getParam("max_number_of_objects", m_maxNumberOfObjects); + +} + +void CTracking::getParams(const std::vector & argv) +{ + + for (int i = 0; i < argv.size(); i++) + { + if (argv[i] == "show_rgb") + { + m_show_rgb = true; + } + else if (argv[i] == "-max_number_of_objects") // max objects to track + { + m_maxNumberOfObjects = atoi(argv[++i].c_str()); + } + else if (argv[i] == "-subscribe_to_localization") + { + m_localization_msg = true; + } + else if (argv[i] == "-estimate_center") + { + m_estimateCenter = true; + } + } + + +} + +void 
CTracking::init(ros::NodeHandle& nh) +{ + + m_or_data = nullptr; + m_or_configuration = nullptr; + m_no_subscribers = true; + m_nh = nh; + m_sub_depthCameraInfo = m_nh.subscribe("camera/depth/camera_info", 1, &CTracking::depthCameraInfoCallback, this); + m_tracked_objects_pub = m_nh.advertise("realsense/tracked_objects", 1); + + + if (m_show_rgb) + { + cv::namedWindow("tracking"); + } +} + +void CTracking::depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + m_sub_depthCameraInfo.shutdown(); + // this callback is used in trackMiddle mode. + // get the image size and set the bbox in the middle of the image + m_depthHeight = cameraInfo->height; + m_depthWidth = cameraInfo->width; + + ROS_INFO("%s", "got depth info"); + utils.get_extrinsics(cameraInfo, m_ext); + utils.get_intrinsics(cameraInfo, m_depthInt); + + m_sub_colorCameraInfo = m_nh.subscribe("camera/color/camera_info", 1, &CTracking::colorCameraInfoCallback, this); + +} + +void CTracking::colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo) +{ + m_sub_colorCameraInfo.shutdown(); + ROS_INFO("got color info"); + utils.get_intrinsics(cameraInfo, m_colorInt); + // get the image size and set the bbox in the middle of the image + m_colorHeight = cameraInfo->height; + m_colorWidth = cameraInfo->width; + + if (utils.init_or(m_colorInt, m_depthInt, m_ext, m_impl, &m_or_data, &m_or_configuration) != rs::core::status_no_error) + { + ROS_ERROR("Unable to init Object Recgonition!"); + exit(0); + } + + + m_colorInfo.height = m_colorHeight; + m_colorInfo.width = m_colorWidth; + m_colorInfo.format = rs::core::pixel_format::rgb8; + m_colorInfo.pitch = m_colorInfo.width * 3; + + + m_depthInfo.height = m_depthHeight; + m_depthInfo.width = m_depthWidth; + m_depthInfo.format = rs::core::pixel_format::z16; + m_depthInfo.pitch = m_depthInfo.width * 2; + + + m_or_configuration->set_recognition_mode(rs::object_recognition::recognition_mode::TRACKING); + 
m_or_configuration->enable_object_center_estimation(m_estimateCenter); + + m_or_configuration->apply_changes(); + ROS_INFO("init done"); + + // Subscribe to color, depth using synchronization filter + mColorSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/color/image_raw", 1)); + mDepthSubscriber = std::unique_ptr> + (new message_filters::Subscriber(m_nh, "camera/depth/image_raw", 1)); + mTimeSynchronizer = std::unique_ptr> + (new message_filters::TimeSynchronizer(*mColorSubscriber, *mDepthSubscriber, 60)); + + if (m_localization_msg) + m_sub_objects_with_pos = m_nh.subscribe("realsense/localized_objects", 1, &CTracking::localizedObjectsToTrackCallback, this); + else + m_sub_objects_with_pos = m_nh.subscribe("realsense/objects_to_track", 1, &CTracking::objectsToTrackCallback, this); + + +} + + +void CTracking::objectsToTrackCallback(const realsense_ros_object::TrackedObjectsArray& msg) +{ + + // m_sub_objects_with_pos.shutdown(); + + + int array_size = std::min(m_maxNumberOfObjects, (int)msg.tracked_objects_vector.size()); //todo: insert m_maxNumberOfObjects as parameter to orconfiguration + rs::core::rect* trackingRois = new rs::core::rect[array_size]; + + for (int i = 0; i < array_size ; i++) + { + trackingRois[i].x = msg.tracked_objects_vector[i].bbox.x; + trackingRois[i].y = msg.tracked_objects_vector[i].bbox.y; + trackingRois[i].width = msg.tracked_objects_vector[i].bbox.width; + trackingRois[i].height = msg.tracked_objects_vector[i].bbox.height; + } + + rs::core::status st = m_or_configuration->set_tracking_rois(trackingRois, array_size); + st = m_or_configuration->apply_changes(); + delete[] trackingRois; + + + mTimeSynchronizer->registerCallback(boost::bind(&CTracking::TrackingCallback, this, _1, _2)); + +} + + +void CTracking::localizedObjectsToTrackCallback(const realsense_ros_object::ObjectsInBoxes& msg) +{ + + m_sub_objects_with_pos.shutdown(); + ROS_INFO("new objects to track"); + + int array_size = 
std::min(m_maxNumberOfObjects, (int)msg.objects_vector.size()); + rs::core::rect* trackingRois = new rs::core::rect[array_size]; + + for (int i = 0; i < array_size ; i++) + { + trackingRois[i].x = msg.objects_vector[i].object_bbox.x; + trackingRois[i].y = msg.objects_vector[i].object_bbox.y; + trackingRois[i].width = msg.objects_vector[i].object_bbox.width; + trackingRois[i].height = msg.objects_vector[i].object_bbox.height; + } + + rs::core::status st = m_or_configuration->set_tracking_rois(trackingRois, array_size); + st = m_or_configuration->apply_changes(); + delete[] trackingRois; + + + mTimeSynchronizer->registerCallback(boost::bind(&CTracking::TrackingCallback, this, _1, _2)); + +} + +void CTracking::TrackingCallback(const sensor_msgs::ImageConstPtr& color , const sensor_msgs::ImageConstPtr& depth) +{ + size_t timeRos1 = std::clock(); + if (m_tracked_objects_pub.getNumSubscribers() <= 0) + return; + try + { + m_imageColor = cv_bridge::toCvShare(color, "rgb8")->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("Could not convert from '%s' to 'rgb8'.", color->encoding.c_str()); + return; + } + + try + { + m_imageDepth = cv_bridge::toCvShare(depth, sensor_msgs::image_encodings::TYPE_16UC1)->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("toCvShare failed"); + return; + } + + + + int array_size = 0; + rs::core::status st; + + rs::core::correlated_sample_set* sample_set = utils.get_sample_set(m_colorInfo, m_depthInfo, m_imageColor.data, m_imageDepth.data, color->header.seq, (uint64_t)color->header.stamp.nsec); + st = m_impl.process_sample_set(*sample_set); + if (st != rs::core::status_no_error) + ROS_ERROR("process_sample_set_sync failed"); + + rs::object_recognition::tracking_data* tracking_data = nullptr; + + //retrieve tracking data from the or_data object + + st = m_or_data->query_tracking_result(&tracking_data, array_size); + if (st != rs::core::status_no_error) + ROS_ERROR("query_tracking_result failed"); + + 
realsense_ros_object::TrackedObjectsArray outROIs; + realsense_ros_object::TrackedObject to; + for (int i = 0; i < array_size; i++) + { + + + to.bbox.width = tracking_data[i].roi.width; + to.bbox.height = tracking_data[i].roi.height; + to.bbox.x = tracking_data[i].roi.x; + to.bbox.y = tracking_data[i].roi.y; + to.location.coordinates.x = tracking_data[i].object_center.coordinates.x; + to.location.coordinates.y = tracking_data[i].object_center.coordinates.y; + to.location.coordinates.z = tracking_data[i].object_center.coordinates.z; + to.location.horiz_margin = tracking_data[i].object_center.horiz_margin; + to.location.vert_margin = tracking_data[i].object_center.vert_margin; + to.id = i; + outROIs.tracked_objects_vector.push_back(to); + + } + // copy the header for image and tracking sync; + outROIs.header = color->header; + m_tracked_objects_pub.publish(outROIs); +} + + +#ifdef RUN_AS_NODELET + +//****************************** +// Public Methods +//****************************** + +CTrackingNodelet::~CTrackingNodelet() +{ +} + +void CTrackingNodelet::onInit() +{ + NODELET_INFO("Object Tracking OnInit."); + ros::NodeHandle& local_nh = getPrivateNodeHandle(); + m_TrackingNodelet.getParams(local_nh); + + ros::NodeHandle& nh = getNodeHandle(); + m_TrackingNodelet.init(nh); + + NODELET_INFO("Objects Tracker nodelet initialized."); +} + +#endif + +} + diff --git a/realsense_ros_object/src/realsense_tracking_nodelet.h b/realsense_ros_object/src/realsense_tracking_nodelet.h new file mode 100644 index 0000000..713066d --- /dev/null +++ b/realsense_ros_object/src/realsense_tracking_nodelet.h @@ -0,0 +1,144 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#pragma once +# ifndef RS_TRACKING_NODELET +# define RS_TRACKING_NODELET + +/////////////////////////////////////////////// +/// Dependencies +/////////////////////////////////////////////// + +#include +#include +#include + +#include +#include "utils.h" + + +namespace realsense_ros_object +{ +/////////////////////////////////////////////// +/// CTracking - +/////////////////////////////////////////////// +class CTracking +{ +public : + //=================================== + // Interface + //=================================== + void init(ros::NodeHandle& nh); + void getParams(ros::NodeHandle& nh); + void getParams(const std::vector & argv); + + CTracking(); + virtual ~CTracking(); + +private: + //=================================== + // Member Functions + //=================================== + + int initialize(); + int unInitialize(); + + + + //Static member functions: + void colorCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void depthCameraInfoCallback(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo); + void objectsToTrackCallback(const realsense_ros_object::TrackedObjectsArray& msg); + void localizedObjectsToTrackCallback(const realsense_ros_object::ObjectsInBoxes& msg); + + + void TrackingCallback(const sensor_msgs::ImageConstPtr& color , const sensor_msgs::ImageConstPtr& depth); + + //=================================== + // Member Variables + //=================================== + + image_transport::Subscriber m_sub_color; + image_transport::Subscriber m_sub_depth; + ros::Subscriber m_sub_objects_with_pos; + ros::Subscriber m_sub_colorCameraInfo; + ros::Subscriber m_sub_depthCameraInfo; + + std::unique_ptr> mDepthSubscriber; + std::unique_ptr> mColorSubscriber; + std::unique_ptr> mTimeSynchronizer; + + ros::Publisher m_tracked_objects_pub; + + cv::Mat m_imageColor; + cv::Mat m_imageDepth; + int m_frame_number; + + bool m_show_rgb; + bool m_localization_msg; + bool m_estimateCenter; + + + bool m_no_subscribers; + int 
m_numberOfObjects; + int m_maxNumberOfObjects; + + ros::NodeHandle m_nh; + + + std::string m_colorInfoTopic; + + int m_colorHeight; + int m_colorWidth; + int m_depthHeight; + int m_depthWidth; + + rs::extrinsics m_ext; + rs::intrinsics m_colorInt; + rs::intrinsics m_depthInt; + + ros_camera_utils utils; + + rs::core::image_info m_colorInfo; + rs::core::image_info m_depthInfo; + + rs::object_recognition::or_video_module_impl m_impl; + rs::object_recognition::or_data_interface* m_or_data; + rs::object_recognition::or_configuration_interface* m_or_configuration; + + + +}; + +#ifdef RUN_AS_NODELET +/////////////////////////////////////////////// +/// CTrackingNodelet +/////////////////////////////////////////////// +class CTrackingNodelet : public nodelet::Nodelet +{ +public : + //=================================== + // Interface + //=================================== + virtual void onInit(); + + ~CTrackingNodelet(); + +private: + //=================================== + // Member Functions + //=================================== + + //=================================== + // Member Variables + //=================================== + + CTracking m_TrackingNodelet; + +}; +#endif +}; + + +#endif // RS_TRACKING_NODELET + diff --git a/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.cpp b/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.cpp new file mode 100644 index 0000000..2385ebd --- /dev/null +++ b/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.cpp @@ -0,0 +1,194 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#include "ros/ros.h" + +#include "realsense_ros_object/Rect.h" +#include "realsense_ros_object/TrackedObjectsArray.h" +#include "realsense_ros_object/ObjectInBox.h" +#include "realsense_ros_object/ObjectsInBoxes.h" +#include "realsense_ros_object/UI.h" + +#include +#include +#include +#include "realsense_ormgr_nodelet.h" +#include + + +#if defined(RUN_AS_NODELET) +PLUGINLIB_EXPORT_CLASS(realsense_ros_object::COrmgrNodelet, nodelet::Nodelet) +#endif + +namespace realsense_ros_object +{ +//****************************** +// Public Methods +//****************************** + +COrmgr::COrmgr() +{ + initialize(); +} + +COrmgr::~COrmgr() +{ + unInitialize(); +} + +int COrmgr::unInitialize() +{ + objects_vector_.objects_vector.clear(); + return 0; +} + +int COrmgr::initialize() +{ + threshold_ = 1; + max_number_of_objects_ = 5; + return 0; +} + +void COrmgr::getParams(ros::NodeHandle& nh) +{ + nh.getParam("threshold", threshold_); + nh.getParam("max_number_of_objects", max_number_of_objects_); + nh.getParam("localize_every_frame", localize_every_frame_); +} + +void COrmgr::getParams(const std::vector & argv) +{ + for (int i = 0; i < argv.size(); i++) + { + if (argv[i] == "-threshold") // max objects to track + { + threshold_ = atoi(argv[++i].c_str()); + } + else if (argv[i] == "-max_number_of_objects") // max objects to track + { + max_number_of_objects_ = atoi(argv[++i].c_str()); + } + } +} + +void COrmgr::init(ros::NodeHandle& nh) +{ + nh_ = nh; + sub_localized_objects_ = nh_.subscribe("realsense/localized_objects", 1, &COrmgr::localizeidObjectsCallback , this); + sub_UI_ = nh_.subscribe("realsense/UI", 1, &COrmgr::UICallback , this); + + objects_to_track_pub_ = nh_.advertise("realsense/objects_to_track", 1); + tracked_localized_pub_ = nh_.advertise( + "realsense/localized_tracked_objects", 1); +} + + + +void COrmgr::localizeidObjectsCallback(const realsense_ros_object::ObjectsInBoxes& msg) +{ +// get objects roi from localization and publish it + int array_size = 
std::min(max_number_of_objects_, (int)msg.objects_vector.size()); + if (array_size <= 0) + return ; + sub_localized_objects_.shutdown(); + ROS_INFO("new objects to track"); + realsense_ros_object::TrackedObjectsArray outROIs; + objects_vector_.objects_vector.clear(); + + realsense_ros_object::TrackedObject to; + for (int i = 0; i < array_size; i++) + { + to.bbox = msg.objects_vector[i].object_bbox; + to.location = msg.objects_vector[i].location; + to.id = i; + outROIs.tracked_objects_vector.push_back(to); + objects_vector_.objects_vector.push_back(msg.objects_vector[i]); + } + + sub_tracked_objects_ = nh_.subscribe("realsense/tracked_objects", 1, &COrmgr::trackedObjectCallback , this); + objects_to_track_pub_.publish(outROIs); +} + +void COrmgr::UICallback(const realsense_ros_object::UI& msg) +{ + if (true == localize_every_frame_) + { + return; + } + ROS_INFO("re-localization\n "); + sub_tracked_objects_.shutdown(); + sub_localized_objects_ = nh_.subscribe("realsense/localized_objects", 1, &COrmgr::localizeidObjectsCallback , this); +} + +void COrmgr::trackedObjectCallback(const realsense_ros_object::TrackedObjectsArray::ConstPtr & msg) +{ + int nCtr = 0; + for (int i = 0; i < (int)msg->tracked_objects_vector.size(); i++) + { + objects_vector_.objects_vector[i].object_bbox = msg->tracked_objects_vector[i].bbox; + objects_vector_.objects_vector[i].location = msg->tracked_objects_vector[i].location; + + if (msg->tracked_objects_vector[i].bbox.width == 0 || msg->tracked_objects_vector[i].bbox.height == 0) + nCtr++; + } + + if (localize_every_frame_) + { + sub_tracked_objects_.shutdown(); + sub_localized_objects_ = nh_.subscribe("realsense/localized_objects", 1, &COrmgr::localizeidObjectsCallback , this); + } + + objects_vector_.header = msg->header; + tracked_localized_pub_.publish(objects_vector_); +} + + +#if defined(RUN_AS_NODELET) + +//****************************** +// Public Methods +//****************************** + +COrmgrNodelet::~COrmgrNodelet() +{ +} + 
+void COrmgrNodelet::onInit() +{ + ROS_INFO_STREAM("OR manager OnInit"); + NODELET_INFO("OR manager OnInit."); + + ros::NodeHandle& local_nh = getPrivateNodeHandle(); + manager_nodelet_.getParams(local_nh); + + ros::NodeHandle& nh = getNodeHandle(); + manager_nodelet_.init(nh); + + NODELET_INFO("OR manager nodelet initialized."); +} + +#endif + +} + +#if !defined(RUN_AS_NODELET) + +int main(int argc, char **argv) +{ + + ros::init(argc, argv, "realsense_ormgr"); + ros::NodeHandle nh; + realsense::COrmgr or_manager; + + std::vector argv_strings; + for (int i = 0; i < argc; i++) + { + argvStrings.push_back(argv[i]); + } + + or_manager.getParams(argv_strings); + or_manager.init(nh); + ros::spin(); +} +#endif + diff --git a/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.h b/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.h new file mode 100644 index 0000000..81c82d5 --- /dev/null +++ b/realsense_ros_object/src/sample-localize-track/realsense_ormgr_nodelet.h @@ -0,0 +1,97 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#pragma once +# ifndef RS_ORMGR_NODELET +# define RS_ORMGR_NODELET + +/////////////////////////////////////////////// +/// Dependencies +/////////////////////////////////////////////// + +#include +#include +#include + +namespace realsense_ros_object +{ +/////////////////////////////////////////////// +/// CTracking - +/////////////////////////////////////////////// +class COrmgr +{ +public : + //=================================== + // Interface + //=================================== + void getParams(ros::NodeHandle& nh); + void getParams(const std::vector & argv); + void init(ros::NodeHandle& nh); + + COrmgr(); + virtual ~COrmgr(); + +private: + //=================================== + // Member Functions + //=================================== + + int initialize(); + int unInitialize(); + + //Static member functions: + void localizeidObjectsCallback(const realsense_ros_object::ObjectsInBoxes& msg); + void trackedObjectCallback(const realsense_ros_object::TrackedObjectsArray::ConstPtr & msg); + void UICallback(const realsense_ros_object::UI & msg); + + //=================================== + // Member Variables + //=================================== + + ros::Subscriber sub_localized_objects_; + ros::Subscriber sub_tracked_objects_; + ros::Subscriber sub_UI_; + + ros::Publisher objects_to_track_pub_; + ros::Publisher tracked_localized_pub_; + + ros::NodeHandle nh_; + + realsense_ros_object::ObjectsInBoxes objects_vector_; + + int max_number_of_objects_; + int threshold_; + bool localize_every_frame_; +}; + +#ifdef RUN_AS_NODELET +/////////////////////////////////////////////// +/// COrmgrNodelet +/////////////////////////////////////////////// +class COrmgrNodelet : public nodelet::Nodelet +{ +public : + //=================================== + // Interface + //=================================== + virtual void onInit(); + + ~COrmgrNodelet(); + +private: + //=================================== + // Member Functions + //=================================== + + 
//=================================== + // Member Variables + //=================================== + + COrmgr manager_nodelet_; +}; +#endif +}; + + +#endif // RS_TRACKING_NODELET + diff --git a/realsense_ros_object/src/utils.cpp b/realsense_ros_object/src/utils.cpp new file mode 100644 index 0000000..0d8438b --- /dev/null +++ b/realsense_ros_object/src/utils.cpp @@ -0,0 +1,164 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#include "utils.h" +#include +ros_camera_utils::ros_camera_utils() +{ + m_sample_set = nullptr; +} + +ros_camera_utils::~ros_camera_utils() +{ + release_images(); + delete m_sample_set; +} + +void ros_camera_utils::release_images() +{ + if (m_sample_set) + { + if (m_sample_set->images[(int)rs::stream::color]) + { + m_sample_set->images[(int)rs::stream::color]->release(); + m_sample_set->images[(int)rs::stream::color] = nullptr; + } + + if (m_sample_set->images[(int)rs::stream::depth]) + { + m_sample_set->images[(int)rs::stream::depth]->release(); + m_sample_set->images[(int)rs::stream::depth] = nullptr; + } + } + +} +void ros_camera_utils::get_intrinsics(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo, rs::intrinsics& intrins) +{ + + intrins.width = cameraInfo->width; /* width of the image in pixels */ + intrins.height = cameraInfo->height; /* height of the image in pixels */ + intrins.ppx = cameraInfo->K[2]; /* horizontal coordinate of the principal point of the image, as a pixel offset from the left edge */ + intrins.ppy = cameraInfo->K[5]; /* vertical coordinate of the principal point of the image, as a pixel offset from the top edge */ + intrins.fx = cameraInfo->K[0]; /* focal length of the image plane, as a multiple of pixel width */ + intrins.fy = cameraInfo->K[4]; /* focal length of the image plane, as a multiple of pixel height */ + intrins.rs_intrinsics::model = RS_DISTORTION_MODIFIED_BROWN_CONRADY ; /* distortion model of the image */ + + + /* 
distortion coefficients */ + for (unsigned int i = 0; i < sizeof(cameraInfo->D) / sizeof(double); i++) + { + intrins.coeffs[i] = cameraInfo->D[i]; + } + +} + +void ros_camera_utils::get_extrinsics(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo, rs::extrinsics& extrins) +{ + //get extrinsics + for (unsigned int i = 0; i < sizeof(cameraInfo->R) / sizeof(double); i++) + { + extrins.rotation[i] = cameraInfo->R[i]; + } + + // copy projection matrix + + for (unsigned int i = 3, j = 0; i < sizeof(cameraInfo->P) / sizeof(double); i = i + 4, j++) + { + extrins.translation[j] = (float)(cameraInfo->P[i]); + } + + +} + +rs::core::status ros_camera_utils::init_or(const rs::intrinsics& colorInt, const rs::intrinsics& depthInt, const rs::extrinsics& ext, rs::object_recognition::or_video_module_impl& impl, + rs::object_recognition::or_data_interface** or_data, rs::object_recognition::or_configuration_interface** or_configuration) +{ + rs::core::status st; + + // after getting all parameters from the camera we need to set the actual_module_config + rs::core::video_module_interface::actual_module_config actualConfig; + // get the extrisics paramters from the camera + rs::core::extrinsics core_ext; + + //get color intrinsics + rs::core::intrinsics core_colorInt; + + //get depth intrinsics + rs::core::intrinsics core_depthInt; + + rs::core::video_module_interface::supported_module_config cfg; + bool supported = false; + + for (int i = 0; impl.query_supported_module_config(i, cfg) == rs::core::status_no_error; i++) + { + // add check that the cfg is ok + if (cfg.image_streams_configs[(int)rs::stream::color].size.width == colorInt.width && + cfg.image_streams_configs[(int)rs::stream::color].size.height == colorInt.height && + cfg.image_streams_configs[(int)rs::stream::depth].size.width == depthInt.width && + cfg.image_streams_configs[(int)rs::stream::depth].size.height == depthInt.height) + { + supported = true; + break; + } + } + if (!supported) + return 
rs::core::status_param_unsupported;

  //1. copy the extrinsics
  memcpy(&actualConfig.image_streams_configs[(int)rs::stream::color].extrinsics, &ext, sizeof(rs::extrinsics));
  core_ext = rs::utils::convert_extrinsics(ext);

  //2. copy the color intrinsics
  memcpy(&actualConfig.image_streams_configs[(int)rs::stream::color].intrinsics, &colorInt, sizeof(rs::intrinsics));
  core_colorInt = rs::utils::convert_intrinsics(colorInt);

  //3. copy the depth intrinsics
  memcpy(&actualConfig.image_streams_configs[(int)rs::stream::depth].intrinsics, &depthInt, sizeof(rs::intrinsics));
  core_depthInt = rs::utils::convert_intrinsics(depthInt);

  // handling projection
  // NOTE(review): the projection instance is handed to actualConfig but is
  // never released here -- confirm set_module_config takes ownership.
  rs::core::projection_interface* proj = rs::core::projection_interface::create_instance(&core_colorInt, &core_depthInt, &core_ext);
  actualConfig.projection = proj;

  //setting the selected configuration (after projection)
  st = impl.set_module_config(actualConfig);
  if (st != rs::core::status_no_error)
  {
    return st;
  }

  //create or data object
  *or_data = impl.create_output();
  //create or configuration object
  *or_configuration = impl.create_active_configuration();

  // The sample set is allocated once and reused for every frame; images are
  // attached per-frame by get_sample_set().
  m_sample_set = new rs::core::correlated_sample_set();

  m_sample_set->images[(int)rs::stream::color] = nullptr;
  m_sample_set->images[(int)rs::stream::depth] = nullptr;

  return st;
}

// Wrap the given raw color/depth buffers as the current frame's correlated
// sample set. The returned pointer is owned by this object.
rs::core::correlated_sample_set* ros_camera_utils::get_sample_set(rs::core::image_info& colorInfo, rs::core::image_info& depthInfo, const void* colorBuffer, const void*depthBuffer, int frameNumber, uint64_t timestamp)
{
  // release images from the previous frame
  release_images();
  //create images from buffers
  rs::core::image_interface::image_data_with_data_releaser color_container(colorBuffer);
  rs::core::image_interface::image_data_with_data_releaser depth_container(depthBuffer);
  auto colorImg = rs::core::image_interface::create_instance_from_raw_data(&colorInfo, color_container, rs::core::stream_type::color, rs::core::image_interface::any, frameNumber, timestamp);
  auto depthImg = rs::core::image_interface::create_instance_from_raw_data(&depthInfo, depth_container, rs::core::stream_type::depth, rs::core::image_interface::any, frameNumber, timestamp);

  //create sample from both images
  m_sample_set->images[(int)rs::stream::color] = colorImg;
  m_sample_set->images[(int)rs::stream::depth] = depthImg;

  return m_sample_set;
}
diff --git a/realsense_ros_object/src/utils.h b/realsense_ros_object/src/utils.h new file mode 100644 index 0000000..dac2b75 --- /dev/null +++ b/realsense_ros_object/src/utils.h @@ -0,0 +1,33 @@
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.

#pragma once
#include
#include
#include
#include
#include
#include


// Helper that adapts ROS camera messages to the RealSense object-recognition
// middleware: converts intrinsics/extrinsics and wraps raw image buffers in
// a correlated sample set.
class ros_camera_utils
{
public:
  ros_camera_utils();
  ~ros_camera_utils();

  // Fill an rs::intrinsics from a ROS CameraInfo message.
  void get_intrinsics(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo, rs::intrinsics& intrins);
  // Fill an rs::extrinsics (rotation + translation) from a ROS CameraInfo message.
  void get_extrinsics(const sensor_msgs::CameraInfo::ConstPtr & cameraInfo, rs::extrinsics& extrins);
  // Select and apply a module configuration matching the given stream
  // parameters; allocates the reusable sample set on success.
  rs::core::status init_or(const rs::intrinsics& colorInt, const rs::intrinsics& depthInt, const rs::extrinsics& ext, rs::object_recognition::or_video_module_impl& impl,
                           rs::object_recognition::or_data_interface** or_data, rs::object_recognition::or_configuration_interface** or_configuration);

  // Wrap the given raw color/depth buffers as the current frame's sample set.
  rs::core::correlated_sample_set* get_sample_set(rs::core::image_info& colorInfo, rs::core::image_info& depthInfo,
      const void* colorBuffer, const void*depthBuffer, int frameNumber, uint64_t timestamp);

private:

  // Release (refcount-decrement) the images held by the current sample set.
  void release_images();

  rs::core::correlated_sample_set* m_sample_set;  // owned; deleted in dtor

};
diff --git a/realsense_ros_object/src/viewer/GUI_utils.cpp b/realsense_ros_object/src/viewer/GUI_utils.cpp new file mode 100644 index 0000000..f7203e4 --- /dev/null +++ b/realsense_ros_object/src/viewer/GUI_utils.cpp @@ -0,0 +1,364 @@
// License: Apache 2.0.
// See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.

#include "GUI_utils.h"
#include
#include
#include

// Set up the default drawing parameters and allocate the backing image.
// Returns false if the image buffer could not be allocated.
bool GUI_utils::init_GUI(cv::String window_name)
{
  m_rect_thickness = 3;
  m_rect_line_type = 8;
  m_rect_shift = 0;
  m_text_thickness = 1;
  m_text_font = cv::FONT_HERSHEY_SIMPLEX;
  m_font_scale = 0.75;
  m_text_line_type = 5;
  m_window_name = window_name;
  m_image.create(468, 628, CV_8UC3);  // default canvas until the first frame arrives
  if (!m_image.data)
    return false;

  return true;
}

// --- trivial accessors for the drawing parameters ---

void GUI_utils::set_rect_thickness(int newThickness)
{
  m_rect_thickness = newThickness;
}

int GUI_utils::get_rect_thickness()
{
  return m_rect_thickness;
}

void GUI_utils::set_line_type(int newLineType)
{
  m_rect_line_type = newLineType;
}

int GUI_utils::get_line_type()
{
  return m_rect_line_type;
}

void GUI_utils::set_rect_shift(int newShift)
{
  m_rect_shift = newShift;
}

int GUI_utils::get_rect_shift()
{
  return m_rect_shift;
}

void GUI_utils::set_text_thickness(int newTextThickness)
{
  m_text_thickness = newTextThickness;
}

int GUI_utils::get_text_thickness()
{
  return m_text_thickness;
}

void GUI_utils::set_text_font(int newTextFont)
{
  m_text_font = newTextFont;
}

int GUI_utils::get_text_font()
{
  return m_text_font;
}

// NOTE(review): m_font_scale is a double (default 0.75) but this pair uses
// int, silently truncating fractional scales -- confirm whether the int
// interface is intentional before changing the declared signature.
void GUI_utils::set_font_scale(int newFontScale)
{
  m_font_scale = newFontScale;
}

int GUI_utils::get_font_scale()
{
  return m_font_scale;
}

void GUI_utils::set_text_line_type(int newTextLineType)
{
  m_text_line_type = newTextLineType;
}

int GUI_utils::get_text_line_type()
{
  return m_text_line_type;
}

cv::Mat& GUI_utils::get_color_cvmat()
{
  return m_image;
}

cv::String GUI_utils::get_win_name()
{
  return m_window_name;
}

// Draw `text` with a filled background box; colors are chosen by classID.
void GUI_utils::draw_text(cv::String text, cv::Point originPoint, int classID)
{
  // Getting the rectangle color
  cv::Scalar color = get_color(classID);

  //Getting title text size
  int baseline = 0;
  cv::Size
nameTextSize = cv::getTextSize(text, m_text_font, m_font_scale, m_text_thickness, &baseline); + baseline += m_text_thickness; + + // Getting beginning point of text + cv::Point textOrigin(originPoint.x, originPoint.y - nameTextSize.height / 2); + + // Draw the box + int filledRectangle = -1; + cv::Point originShifted = textOrigin + cv::Point(0, baseline); + cv::Point opositeToOrigin = textOrigin + cv::Point(nameTextSize.width, -nameTextSize.height); + cv::rectangle(m_image, originShifted, opositeToOrigin, color, filledRectangle, m_rect_line_type, m_rect_shift); + + // Then put the text itself + cv::putText(m_image, text, textOrigin, m_text_font, m_font_scale, get_text_color(classID), m_text_thickness, m_text_line_type); +} + +void GUI_utils::draw_rect_no_text(cv::Point topLeftPoint, int width, int height, int classID) +{ + // Translating the given data to two opposite corners of the rectangle + cv::Point* bottomRightPoint = new cv::Point(topLeftPoint.x + width, topLeftPoint.y + height); + + // Getting the rectangle color + cv::Scalar color = get_color(classID); + + // Drawing the rectangle + cv::rectangle(m_image, topLeftPoint, *bottomRightPoint, color, m_rect_thickness, m_rect_line_type, m_rect_shift); +} + +bool GUI_utils::draw_rect(cv::String name, int classID, int x, int y, int width, int height) +{ + if (x == 0 || y == 0) + { + return false; + } + //Getting title text size + int baseline = 0; + cv::Size nameTextSize = cv::getTextSize(name, m_text_font, m_font_scale, m_text_thickness, &baseline); + + int minCoordinateValue = 10; + cv::Point* topLeftPoint = new cv::Point(std::max(minCoordinateValue, x), + std::max(minCoordinateValue + nameTextSize.height, y)); + + draw_rect_no_text(*topLeftPoint, width + std::min(x, 0), height + std::min(y, 0), classID); + + draw_text(name, *topLeftPoint, classID); + + return true; +} + + +void GUI_utils::draw_no_results() +{ + cv::String text = "No object was found"; + int baseline = 0; + + // Getting the rectangle color + int 
classID = 0; // This text doesn't belong to any object class + cv::Scalar color = get_color(classID); // Meaning no color - black + + //Getting title text size + cv::Size textSize = cv::getTextSize(text, m_text_font, m_font_scale, m_text_thickness, &baseline); + baseline += m_text_thickness; + + // Getting beginning point of text + cv::Point textOrigin(0, 0); + + // Draw the box + int filledRectangle = -1; + int rectangleHeightFactor = 2; // I can't say why, but this number seems to put the rectangle in it's right place + cv::Point originShifted = textOrigin + cv::Point(0, baseline); + cv::Point opositeToOrigin = textOrigin + cv::Point(textSize.width, rectangleHeightFactor * textSize.height); + cv::rectangle(m_image, originShifted, opositeToOrigin, color, filledRectangle, m_rect_line_type, m_rect_shift); + + int finalTextShift = 3; // I can't say why, but this number seems to put the text in it's right place + + // Then put the text itself + cv::putText(m_image, text, finalTextShift * originShifted, m_text_font, m_font_scale, + get_text_color(classID), m_text_thickness, m_text_line_type); + +} + +bool GUI_utils::draw_results(const sensor_msgs::ImageConstPtr& color, const realsense_ros_object::ObjectsInBoxes& msg) +{ + + try + { + + m_image = cv_bridge::toCvShare(color, "bgr8")->image; + } + catch (cv_bridge::Exception& e) + { + ROS_ERROR("Could not convert from '%s' to 'bgr8'.", color->encoding.c_str()); + return false; + } + + if (m_image.data == NULL) + { + std::cout << "No image found! 
Check path.\n"; + return false;//ERROR + } + + for (int i = 0; i < (int)msg.objects_vector.size(); i++) + { + + cv::String text = msg.objects_vector[i].object.object_name + " " + get_3D_location_string(msg.objects_vector[i].location.coordinates); //todo: drow vertical and horisontical margin + + + draw_rect(text, 3 * i, (int)msg.objects_vector[i].object_bbox.x, (int)msg.objects_vector[i].object_bbox.y, (int)msg.objects_vector[i].object_bbox.width, (int)msg.objects_vector[i].object_bbox.height); + } + + return true; +} + +char GUI_utils::save_results(const realsense_ros_object::ObjectsInBoxes &msg, std::string file_path) +{ + if(file_path == "") + { + const char *home_dir = getenv("HOME"); + + if ( home_dir== NULL) { + home_dir = getpwuid(getuid())->pw_dir; + } + file_path = std::string(home_dir) + "/object_bag_results.txt"; + } + + std::fstream myfile; + myfile.open (file_path, std::ofstream::out | std::ofstream::app); + static int frame_num = -1; + frame_num++; + myfile << "frame " + std::to_string(frame_num) << std::endl; + for (int i = 0; i < (int)msg.objects_vector.size(); i++) + { + myfile << "object id: " << i << std::endl; + myfile << msg.objects_vector[i].object.object_name << std::endl; + myfile << msg.objects_vector[i].object_bbox.x << std::endl; + myfile << msg.objects_vector[i].object_bbox.y << std::endl; + myfile << msg.objects_vector[i].object_bbox.width << std::endl; + myfile << msg.objects_vector[i].object_bbox.height << std::endl; + + myfile << msg.objects_vector[i].location.coordinates.x << std::endl; + myfile << msg.objects_vector[i].location.coordinates.y << std::endl; + myfile << msg.objects_vector[i].location.coordinates.z << std::endl; + myfile << msg.objects_vector[i].location.horiz_margin << std::endl; + myfile << msg.objects_vector[i].location.vert_margin << std::endl; + } + myfile.close(); +} + +std::vector GUI_utils::read_results(std::string file_path) +{ + + std::string frame("frame"); + std::string object_id("object id:"); + 
std::vector res; + std::ifstream resFile; + resFile.open(file_path); + std::string str; + if (resFile.is_open()) + { + bool new_frame = true; + realsense_ros_object::ObjectsInBoxes objs; + while(new_frame) + { + new_frame = false; + std::getline(resFile, str); + while(std::getline(resFile, str)) + { + if(!str.compare(0, object_id.size(), object_id)) + { + realsense_ros_object::ObjectInBox objectInBox; + std::getline(resFile, objectInBox.object.object_name); + std::getline(resFile, str); + objectInBox.object_bbox.x = std::stoi(str); + std::getline(resFile, str); + objectInBox.object_bbox.y = std::stoi(str); + std::getline(resFile, str); + objectInBox.object_bbox.width = std::stoi(str); + std::getline(resFile, str); + objectInBox.object_bbox.height = std::stoi(str); + std::getline(resFile, str); + objectInBox.location.coordinates.x = std::stoi(str); + std::getline(resFile, str); + objectInBox.location.coordinates.y = std::stoi(str); + std::getline(resFile, str); + objectInBox.location.coordinates.z = std::stoi(str); + std::getline(resFile, str); + objectInBox.location.horiz_margin = std::stof(str); + std::getline(resFile, str); + objectInBox.location.vert_margin = std::stof(str); + objs.objects_vector.push_back(objectInBox); + } + else if (!str.compare(0, frame.size(), frame)) + { + new_frame = true; + res.push_back(objs); + } + } + + } + } + + for(auto & frame : res) + { + save_results(frame, std::string("/home/sshoshan/object_bag_results2.txt")); + } + + + return res; +} + +char GUI_utils::show_results() +{ + cv::namedWindow(m_window_name, cv::WINDOW_AUTOSIZE); + cv::imshow(m_window_name, m_image); + char c = cv::waitKey(5); + return c; +} + +cv::Scalar GUI_utils::get_color(int classID) +{ + if (classID > m_max_classes || classID < 1) + { + classID = m_max_classes; + } + + return cv::Scalar(m_color_mat.at(classID - 1, 0), + m_color_mat.at(classID - 1, 1), + m_color_mat.at(classID - 1, 2), + m_color_mat.at(classID - 1, 3)); +} + +cv::Scalar 
GUI_utils::get_text_color(int classID) +{ + if (classID > m_max_classes || classID < 1) + { + classID = m_max_classes; + } + + return cv::Scalar(m_text_mat.at(classID - 1, 0), + m_text_mat.at(classID - 1, 1), + m_text_mat.at(classID - 1, 2), + m_text_mat.at(classID - 1, 3)); +} + +std::string GUI_utils::get_3D_location_string(geometry_msgs::Point32 location) +{ + std::stringstream str_stream; + str_stream << std::fixed << std::setprecision(1) << (location.z / 1000); + std::string str = "@"; + str += str_stream.str() + "m"; + return str; +} diff --git a/realsense_ros_object/src/viewer/GUI_utils.h b/realsense_ros_object/src/viewer/GUI_utils.h new file mode 100644 index 0000000..9f6e04c --- /dev/null +++ b/realsense_ros_object/src/viewer/GUI_utils.h @@ -0,0 +1,350 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. + +#pragma once + +#include +#include +#include +#include +#include "ros/ros.h" +#include +#include +#include "realsense_ros_object/ObjectsInBoxes.h" + +/** + * @brief Class designed to serve the localization demo app, and to display localization rectangles + * together with their recognized objects. + */ +class GUI_utils +{ +public: + bool init_GUI(cv::String window_name); + + /** + * @brief Sets new thickness for the drawn bounding rectangles. + * + * @remark The thickness values are defined by OpenCV. negative values will draw a filled rectangle. + * + * @param[in] new_thickness The new thickness to be set + */ + void set_rect_thickness(int new_thickness); + + /** + * @brief Returns the current value of the thickness of the drawn bounding rectangles. + * + * @return bounding rectangle thickness value. + */ + int get_rect_thickness(); + + /** + * @brief Sets new line type for the drawn bounding rectangles. + * + * @remark The line types values are defined by OpenCV. Please view OpenCV documentation. 
+ * + * @param[in] new_line_type The new line type to be set + */ + void set_line_type(int new_line_type); + + /** + * @brief Returns the current value of the line type of the drawn bounding rectangles. + * + * @return bounding rectangle line type value. + */ + int get_line_type(); + + /** + * @brief Sets new shift for the drawn bounding rectangles. + * + * @remark The shift values are defined by OpenCV as number of fractional bits in the point coordinates + * + * @param[in] new_shift The new shift value to be set + */ + void set_rect_shift(int new_shift); + + /** + * @brief Returns the current value of the shift of the drawn bounding rectangles. + * + * @return bounding rectangle shift value. + */ + int get_rect_shift(); + + /** + * @brief Sets new text thickness for the bounding rectangle title. + * + * @remark The thickness values are defined by OpenCV. + * + * @param[in] new_text_thickness The new shift value to be set + */ + void set_text_thickness(int new_text_thickness); + + /** + * @brief Returns the current value of the textThickness for the bounding rectangle title. + * + * @return Text thickness value. + */ + int get_text_thickness(); + + /** + * @brief Sets new text font value for the bounding rectangle title. + * + * @remark The text font values are defined by OpenCV. + * + * @param[in] new_text_font The new text font to be set + */ + void set_text_font(int new_text_font); + + /** + * @brief Returns the text font value for the bounding rectangle title. + * + * @return Text font value. + */ + int get_text_font(); + + /** + * @brief Sets new text font scale value for the bounding rectangle title. + * + * @remark The text font scale values are defined by OpenCV. + * + * @param[in] new_font_scale The new text font scale to be set + */ + void set_font_scale(int new_font_scale); + + /** + * @brief Returns the text font scale value for the bounding rectangle title. + * + * @return Text font scale value. 
+ */ + int get_font_scale(); + + /** + * @brief Sets new text line type value for the bounding rectangle title. + * + * @remark The text line type values are defined by OpenCV. + * + * @param[in] new_text_line_type The new text font scale to be set + */ + void set_text_line_type(int new_text_line_type); + + /** + * @brief Returns the text line type value for the bounding rectangle title. + * + * @return Text line type value. + */ + int get_text_line_type(); + + /** + * @brief Returns pointer to processed image (color image) + * + * @return Pointer to color image + */ + cv::Mat& get_color_cvmat(); + + /** + * @brief Returns the window name showing the streaming video. + * + * @return Window name. + */ + cv::String get_win_name(); + + /** + * @brief Receives rectangle parameters as represented by object recognition module and an + * object name, then draws the rectangle with the objects name as a title. + * + * @param[in] name Name of the bounded object. + * @param[in] class_ID The class ID that represents the object class. Should suit the objects name. + * @param[in] x Relative coordinate x of top left point. + * @param[in] y Relative coordinate y of top left point. + * @param[in] height Relative height of the rectangle. + * @param[in] width Relative width of the rectangle. + * + * @return True if rectangle was drawn successfully , false otherwise. + */ + bool draw_rect(cv::String name, int class_ID, int x, int y, int width, int height); + + bool draw_results(const sensor_msgs::ImageConstPtr& color, const realsense_ros_object::ObjectsInBoxes& msg); + + + /** + * @brief Draws the previously calculated results + * + * @return Pressed key, if exists. 
+ */ + char show_results(); + + /** + * @brief Write the previously calculated results to text file + * + * @return True if results were writen successfully , false otherwise + */ + char save_results(const realsense_ros_object::ObjectsInBoxes& msg, std::string file_path = ""); + + /** + * @brief Read the calculated results from text file. + * + * @return vector of results + */ + std::vector read_results(std::string file_path); + +protected: + /** + * @brief Gets an object ID and returns unique color. + * + * @warning Addresses only objects recognized by the localization or recognition mechanism. + * @remark In case of unrecognized object - the color black will be returned. + * + * @param[in] class_ID: The class ID that represents the object class. + * + * @return Unique RGBA color represented as a cv::Scalar. + */ + cv::Scalar get_color(int class_ID); + + /** + * @brief Gets an object ID and returns a text color that suits its unique color. + * + * @warning Addresses only objects recognized by the localization or recognition mechanism. + * @remark In case of unrecognized object - the color white will be returned. + * + * @param[in] class_ID: The class ID that represents the object class. + * + * @return RGBA text color that suits the objects unique color. + */ + cv::Scalar get_text_color(int class_ID); + + /** + * @brief Shows "No Results" message, meaning the OR localization algorithm returned no recognised results. 
+ */ + void draw_no_results(); + + /** + * @brief Draws rectangle in a color suitable to classID + * + * @param[in] top_left_point Top left point of the rectangle + * @param[in] width Width of the rectangle + * @param[in] height of the rectangle + * @param[in] class_ID the id of the object class, used to determine the color + */ + void draw_rect_no_text(cv::Point top_left_point, int width, int height, int class_ID); + + /** + * @brief Draws text on image + * + * @param[in] text The text to draw + * @param[in] origin_point The bottom left corner of the text + * @param[in] class_ID the id of the object class, used to determine the color + */ + void draw_text(cv::String text, cv::Point origin_point, int class_ID); + + std::string get_3D_location_string(geometry_msgs::Point32 location); + + int m_rect_thickness; + int m_rect_line_type; + int m_rect_shift; + int m_text_thickness; + int m_text_font; + double m_font_scale; + int m_text_line_type; + cv::Mat m_image; + cv::String m_window_name; + std::vector m_objects_IDs_for_tracking_localization; + + static const int m_max_classes = 44; + + const unsigned char m_color_arr[m_max_classes][4] = + { + {102, 0, 0, 0}, + {153, 0, 0, 0}, + {255, 102, 102, 0}, + {255, 128, 0, 0}, + {255, 178, 102, 0}, + {255, 255, 0, 0}, + {76, 153, 0, 0}, + {128, 255, 0, 0}, + {0, 255, 128, 0}, + {0, 153, 153, 0}, + {0, 204, 204, 0}, + {0, 255, 255, 0}, + {0, 76, 153, 0}, + {0, 128, 255, 0}, + {153, 204, 255, 0}, + {0, 0, 153, 0}, + {0, 0, 255, 0}, + {102, 102, 255, 0}, + {76, 0, 153, 0}, + {127, 0, 255, 0}, + {178, 102, 255, 0}, + {153, 0, 153, 0}, + {204, 0, 204, 0}, + {255, 102, 255, 0}, + {102, 0, 51, 0}, + {204, 0, 102, 0}, + {255, 102, 178, 0}, + {96, 96, 96, 0}, + {192, 192, 192, 0}, + {204, 204, 255, 0}, + {255, 204, 229, 0}, + {0, 102, 51, 0}, + {102, 51, 0, 0}, + {102, 102, 0, 0}, + {153, 153, 0, 0}, + {204, 255, 204, 0}, + {255, 255, 255, 0}, + {255, 204, 204, 0}, + {153, 153, 255, 0}, + {255, 153, 204, 0}, + {0, 153, 76, 0}, + {204, 
102, 0, 0}, + {0, 102, 102, 0}, + {0, 0, 0, 0} + }; + + const unsigned char m_text_arr[m_max_classes][4] = + { + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {0, 0, 0, 0}, //3 + {255, 255, 255, 0}, + {0, 0, 0, 0}, //5 + {0, 0, 0, 0}, + {255, 255, 255, 0}, //7 + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {255, 255, 255, 0}, //10 + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {255, 255, 255, 0}, //13 + {255, 255, 255, 0}, //14 + {0, 0, 0, 0}, + {255, 255, 255, 0}, //16 + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, //28 + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {255, 255, 255, 0}, //32 + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, //35 + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0}, + {255, 255, 255, 0} + }; + const cv::Mat m_color_mat = cv::Mat(m_max_classes, 4, CV_8UC1, const_cast((const void*)(&m_color_arr))); + const cv::Mat m_text_mat = cv::Mat(m_max_classes, 4, CV_8UC1, const_cast((const void*)(&m_text_arr))); +}; diff --git a/realsense_ros_object/src/viewer/realsense_orview_nodelet.cpp b/realsense_ros_object/src/viewer/realsense_orview_nodelet.cpp new file mode 100644 index 0000000..fdbd40b --- /dev/null +++ b/realsense_ros_object/src/viewer/realsense_orview_nodelet.cpp @@ -0,0 +1,135 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#include "ros/ros.h" +#include "realsense_ros_object/UI.h" +#include "realsense_ros_object/ObjectsInBoxes.h" +#include +#include +#include +#include "realsense_orview_nodelet.h" +#include + +#if defined(RUN_AS_NODELET) +PLUGINLIB_EXPORT_CLASS(realsense_ros_object::COrViewNodelet, nodelet::Nodelet) +#endif + +namespace realsense_ros_object +{ + +//****************************** +// Public Methods +// ****************************** +COrView::COrView() +{ + initialize(); + gui.init_GUI("Localization + Tracking"); +} + +COrView::~COrView() +{ + unInitialize(); +} + +int COrView::unInitialize() +{ + cv::destroyAllWindows(); + return 0; +} + +int COrView::initialize() +{ + return 0; +} + +void COrView::getParams(ros::NodeHandle& nh) +{ +} + +void COrView::getParams(const std::vector & argv) +{ + for (int i = 0; i < argv.size(); i++) + { + } + +} + +void COrView::init(ros::NodeHandle& nh) +{ + nh_ = nh; + + // Subscribe to color, tracking using synchronization filter + color_subscriber_.reset(new message_filters::Subscriber(nh_, "camera/color/image_raw", 1)); + tracking_subscriber_.reset(new message_filters::Subscriber( + nh_, "realsense/localized_tracked_objects", 1)); + UI_pub_ = nh_.advertise("realsense/UI", 1); + time_synchronizer_.reset(new message_filters::TimeSynchronizer( + *color_subscriber_, *tracking_subscriber_, 60)); + time_synchronizer_->registerCallback(boost::bind(&COrView::localizedTrackedObjectsCallback, this, _1, _2)); +} + +void COrView::localizedTrackedObjectsCallback( + const sensor_msgs::ImageConstPtr& color, + const realsense_ros_object::ObjectsInBoxes::ConstPtr& msg) +{ + int key = cv::waitKey(1); + if ((char)key == ' ') + { + realsense_ros_object::UI msg; + msg.key = key; + UI_pub_.publish(msg); + //ROS_INFO(std::to_string(key).c_str()); + } + gui.draw_results(color, *msg); + gui.show_results(); +} + +#if defined(RUN_AS_NODELET) + +//****************************** +// Public Methods +//****************************** + 
+COrViewNodelet::~COrViewNodelet() +{} + +void COrViewNodelet::onInit() +{ + ROS_INFO_STREAM("OR viewer OnInit"); + NODELET_INFO("OR viewer OnInit."); + + ros::NodeHandle& local_nh = getPrivateNodeHandle(); + view_nodelet_.getParams(local_nh); + ros::NodeHandle& nh = getNodeHandle(); + view_nodelet_.init(nh); + + NODELET_INFO("OR viewer nodelet initialized."); +} + +#endif + +} + + +#if !defined(RUN_AS_NODELET) +int main(int argc, char **argv) +{ + + ros::init(argc, argv, "realsense_orview"); + ros::NodeHandle nh; + + realsense::COrView or_manager; + + std::vector argv_strings; + for (int i = 0; i < argc; i++) + { + argv_strings.push_back(argv[i]); + } + + or_manager.getParams(argv_strings); + or_manager.init(nh); + + ros::spin(); +} +#endif + diff --git a/realsense_ros_object/src/viewer/realsense_orview_nodelet.h b/realsense_ros_object/src/viewer/realsense_orview_nodelet.h new file mode 100644 index 0000000..a82df14 --- /dev/null +++ b/realsense_ros_object/src/viewer/realsense_orview_nodelet.h @@ -0,0 +1,96 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved. 
+ +#pragma once +# ifndef RS_ORVIEW_NODELET +# define RS_ORVIEW_NODELET + +/////////////////////////////////////////////// +/// Dependencies +/////////////////////////////////////////////// + +#include +#include +#include +#include "GUI_utils.h" +#include + + +namespace realsense_ros_object +{ +/////////////////////////////////////////////// +/// CTracking - +/////////////////////////////////////////////// +class COrView +{ +public: + //=================================== + // Interface + //=================================== + void getParams(ros::NodeHandle& nh); + void getParams(const std::vector & argv); + void init(ros::NodeHandle& nh); + + COrView(); + virtual ~COrView(); + +private: + //=================================== + // Member Functions + //=================================== + + int initialize(); + int unInitialize(); + //Static member functions: + void localizedTrackedObjectsCallback( + const sensor_msgs::ImageConstPtr& color, + const realsense_ros_object::ObjectsInBoxes::ConstPtr& msg); + + //=================================== + // Member Variables + //=================================== + + std::unique_ptr> color_subscriber_; + std::unique_ptr> tracking_subscriber_; + std::unique_ptr> time_synchronizer_; + ros::Publisher UI_pub_; + ros::NodeHandle nh_; + + GUI_utils gui; + + cv::Mat m_imageColor; + +}; + +#ifdef RUN_AS_NODELET +/////////////////////////////////////////////// +/// COrViewNodelet +/////////////////////////////////////////////// +class COrViewNodelet : public nodelet::Nodelet +{ +public: + //=================================== + // Interface + //=================================== + virtual void onInit(); + + ~COrViewNodelet(); + +private: + //=================================== + // Member Functions + //=================================== + + //=================================== + // Member Variables + //=================================== + + COrView view_nodelet_; + +}; +#endif + +}; + +#endif //RS_ORVIEW_NODELET + diff --git 
a/realsense_ros_object/test/object.test b/realsense_ros_object/test/object.test new file mode 100644 index 0000000..3fe9815 --- /dev/null +++ b/realsense_ros_object/test/object.test @@ -0,0 +1,13 @@ + + + + + + + + + + + diff --git a/realsense_ros_object/test/object_test.cpp b/realsense_ros_object/test/object_test.cpp new file mode 100644 index 0000000..8951de1 --- /dev/null +++ b/realsense_ros_object/test/object_test.cpp @@ -0,0 +1,84 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2016 Intel Corporation. All Rights Reserved + +#include +#include +#include "realsense_ros_object/ObjectsInBoxes.h" +#include + +bool objects_in_boxes_recieved = false; +bool camera_info_recieved = false; + +int image_width = 0; +int image_height = 0; +realsense_ros_object::ObjectsInBoxes last_objs; + + +TEST(RealsenseTests, testObj) +{ + EXPECT_TRUE(camera_info_recieved); + EXPECT_TRUE(objects_in_boxes_recieved); + if (!objects_in_boxes_recieved) return; + for(auto objs : last_objs.objects_vector) + { + EXPECT_GT(objs.object.object_name.length(), 1); + EXPECT_GT(objs.object_bbox.width, 0); + EXPECT_GT(objs.object_bbox.height, 0); + if(objs.object_bbox.height > objs.object_bbox.width) + { + if(objs.location.vert_margin != 0 || objs.location.horiz_margin != 0) + { + EXPECT_GT(objs.location.vert_margin, objs.location.horiz_margin); + } + } + else if (objs.object_bbox.height < objs.object_bbox.width) + { + if(objs.location.vert_margin != 0 || objs.location.horiz_margin != 0) + { + EXPECT_LT(objs.location.vert_margin, objs.location.horiz_margin); + } + } + } +} + +void localized_tracked_objects_callback( + const realsense_ros_object::ObjectsInBoxes::ConstPtr& msg) +{ + objects_in_boxes_recieved = true; + last_objs = *msg; +} + + +void color_camera_info_callback(const sensor_msgs::CameraInfo::ConstPtr & camera_info) +{ + camera_info_recieved = true; + image_width = camera_info->width; + image_height = camera_info->height; +} + +int main(int argc, char **argv) 
try +{ + testing::InitGoogleTest(&argc, argv); + + ros::init(argc, argv, "utest"); + ros::NodeHandle nh; + + + ROS_INFO_STREAM("RealSense OBJECT test - Initializing Tests..."); + + + + ros::Subscriber sub_cam_pose = nh.subscribe("camera/color/camera_info", 10, &color_camera_info_callback); + ros::Subscriber sub_reloc_pose = nh.subscribe("realsense/localized_tracked_objects", 10, &localized_tracked_objects_callback); + + + ros::Duration duration; + duration.sec = 10; + duration.sleep(); + ros::spinOnce(); + + ROS_INFO_STREAM("RealSense OBJECT test - Running Tests..."); + + return RUN_ALL_TESTS(); +} +catch (...) {} // catch the "testing::internal::::ClassUniqueToAlwaysTrue" from gtest diff --git a/realsense_ros_person/CHANGELOG.rst b/realsense_ros_person/CHANGELOG.rst new file mode 100644 index 0000000..faf48ad --- /dev/null +++ b/realsense_ros_person/CHANGELOG.rst @@ -0,0 +1,13 @@ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Changelog for package realsense_ros_person +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +0.8.2 (2017-04-04) +------------------ +* Bug fixes and updates +* Contributors: Curfman, Matthew C, Matt Curfman, Stanislav Gutliansky + +0.8.1 (2017-03-02) +------------------ +* Initial release +* Contributors: Curfman, Matthew C, Hirashima, Ben, Lu, LuX, Matt Curfman, Stanislav Gutliansky diff --git a/realsense_ros_person/CMakeLists.txt b/realsense_ros_person/CMakeLists.txt new file mode 100755 index 0000000..0565cd7 --- /dev/null +++ b/realsense_ros_person/CMakeLists.txt @@ -0,0 +1,182 @@ +cmake_minimum_required(VERSION 2.8.3) +project(realsense_ros_person) + +# Find catkin macros and libraries +find_package(catkin REQUIRED COMPONENTS + std_msgs + sensor_msgs + nodelet + message_filters + message_generation + roscpp + roslib + rostime + image_transport + cv_bridge + roscpp_serialization + rospy + ) + +include(FindPkgConfig) +pkg_check_modules(PERSONTRACKING REQUIRED realsense_persontracking) + +# Set compile flags +set(CMAKE_CXX_FLAGS 
"${CMAKE_CXX_FLAGS} -std=c++11 -fexceptions -frtti") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ") + +set(PNG_LIBRARIES png) +set(JPEG_LIBRARIES jpeg) +set(CMAKE_THREAD_LIBS_INIT pthread) + +set(ROOT "${CMAKE_CURRENT_SOURCE_DIR}") + + +add_message_files( + FILES + Landmark.msg + LandmarksInfo.msg + SkeletonJoint.msg + User.msg + UserInfo.msg + Gesture.msg + Gestures.msg + Pointing.msg + Wave.msg + Frame.msg + FrameTest.msg + RectWithConfidence.msg + EulerAngles.msg + EulerAnglesWithConfidence.msg + PersonModuleState.msg +) + +add_service_files( + FILES + TrackingConfig.srv + StartTracking.srv + StopTracking.srv + Recognition.srv + RecognitionRegister.srv + LoadRecognitionDB.srv + SaveRecognitionDB.srv +) + +generate_messages( + DEPENDENCIES + std_msgs + sensor_msgs + geometry_msgs +) + +########### +## Build ## +########### + +include_directories( + ${catkin_INCLUDE_DIRS} + ${PERSONTRACKING_INCLUDE_DIRS} + ${ROOT}/src/apiwrapper/server/ + ${ROOT}/src/apiwrapper/publisher/ + ${ROOT}/src/apiwrapper/helpers/ + +) + +set( PT_LINK_LIBS + ${PERSONTRACKING_LIBRARIES} +-Wl,--start-group + ${OpenCV_LIBS} + ${JPEG_LIBRARIES} +-Wl,--end-group + ${PNG_LIBRARIES} + ${GTK2_LIBRARIES} + ${CMAKE_THREAD_LIBS_INIT} + ) + +catkin_package( + LIBRARIES realsense_ros_person + CATKIN_DEPENDS nodelet roscpp std_msgs image_transport sensor_msgs message_filters roslib rostime cv_bridge roscpp_serialization rospy + +) + +file(GLOB ROS_WRAPPER_SOURCES + "src/apiwrapper/*.cpp" + "src/apiwrapper/helpers/*.cpp" + "src/apiwrapper/publisher/*.cpp" + "src/apiwrapper/server/*.cpp" +) + +# build person tracking API ROS wrapper +add_library(realsense_ros_person + ${ROS_WRAPPER_SOURCES} +) + +target_link_libraries(realsense_ros_person + ${PT_LINK_LIBS} + ${catkin_LIBRARIES} +) + + +add_dependencies(realsense_ros_person ${PROJECT_NAME}_generate_messages_cpp) + + +add_executable(${PROJECT_NAME}_sample + ${ROOT}/src/sample/PersonTrackingSampleRunner.cpp + ${ROOT}/src/sample/PersonTrackingSample.cpp + 
${ROOT}/src/sample/TrackingRenderer/TrackingRenderer.cpp + ${ROOT}/src/sample/TrackingRenderer/Viewer.cpp +) + +target_link_libraries(${PROJECT_NAME}_sample + ${catkin_LIBRARIES} + ${OpenCV_LIBRARIES} + ) + + +add_dependencies(${PROJECT_NAME}_sample ${PROJECT_NAME}_generate_messages_cpp) + +if(REALSENSE_ENABLE_TESTING) + # build person unit tests + find_package(rostest REQUIRED) + + catkin_download_test_data( + person_detection.bag + https://s3-eu-west-1.amazonaws.com/realsense-rostest-public/realsense_ros_person/person_detection.bag + DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/tests + MD5 8a4a9db6b57d142ba8c408fda65b72c2) + + catkin_download_test_data( + recognition.bag + https://s3-eu-west-1.amazonaws.com/realsense-rostest-public/realsense_ros_person/recognition.bag + DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/tests + MD5 f62e471583946bdc1314a250bde7d073) + + catkin_download_test_data( + wave_detection.bag + https://s3-eu-west-1.amazonaws.com/realsense-rostest-public/realsense_ros_person/wave_detection.bag + DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/tests + MD5 716657342c4dd72cfd7abbfdc8d16b11) + + add_executable(tests_person test/person_test.cpp) + add_rostest(test/person_detection.test) + add_rostest(test/person_tracking.test) + add_rostest(test/recognition.test) + add_rostest(test/wave_detection.test) + target_link_libraries(tests_person + ${catkin_LIBRARIES} + ${GTEST_LIBRARIES} + ) + add_dependencies(tests_person realsense_ros_person) + add_dependencies(tests_person person_detection.bag recognition.bag wave_detection.bag) + +endif() + +# Install launch files +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch +) + +# Install xml files +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) + diff --git a/realsense_ros_person/README.md b/realsense_ros_person/README.md new file mode 100644 index 0000000..701bfc8 
--- /dev/null +++ b/realsense_ros_person/README.md @@ -0,0 +1,165 @@ +# ROS Node for Intel® RealSense Person Library +========================================= + +This package contains a ROS wrapper for Intel's Person library. +The realsense_ros_person package provides an image-based strategy to follow the +people in front of the camera as a ROS nodelet. The objective of this module is +to find out the information (can be pose, orientation, id etc.) of a person (or a specific person) +in the images. + +## Nodelets + +### realsense_ros_person - person nodelet - wraps Person API + The realsense_ros_person nodelet takes sensor_msgs/CameraInfo camera info and sensor_msgs/Image images (color, depth), then sends the information of detected persons as realsense_ros_person/Frame realsense_ros_person/FrameTest messages. + +#### Subscribed Topics + camera/depth/camera_info (sensor_msgs/CameraInfo) + The information of the camera used + camera/color/camera_info (sensor_msgs/CameraInfo) + The information of the camera used + camera/depth/image_raw (sensor_msgs/Image) + Depth image obtained from realsense camera + camera/color/image_raw (sensor_msgs/Image) + Color image obtained from realsense camera + +#### Published Topics + person_tracking_output (realsense_ros_person/Frame) Information of the tracked person + person_tracking_output_test (realsense_ros_person/FrameTest) Information of the tracked person + color image from camera + /person_tracking/module_state (realsense_ros_person/PersonModuleState) Person tracking module state (which features are enabled, tracking state, etc.) 
+ +#### Services + person_tracking/tracking_config (realsense_ros_person/TrackingConfig) + Reconfigure several settings for PersonTracking + person_tracking/register_request (realsense_ros_person/RecognitionRegister) + Register person at recognition database + person_tracking/recognition_request (realsense_ros_person/Recognition) + Recognize person + person_tracking/start_tracking_request (realsense_ros_person/StartTracking) + Start tracking on specific person + person_tracking/stop_tracking_request (realsense_ros_person/StopTracking) + Stop tracking on specific person + person_tracking/save_recognition (realsense_ros_person/SaveRecognitionDB) + Save the recognition database to file + person_tracking/load_recognition (realsense_ros_person/LoadRecognitionDB) + Load the recognition database from file + +#### Parameters + ~recognitionEnabled (bool, default: false) + Enable/Disable recognition + ~sceletonEnabled (bool, default: false) + Enable/Disable skeleton + ~pointingGestureEnabled (bool, default: false) + Enable/Disable pointing gesture + ~waveGestureEnabled + Enable/Disable wave gesture + ~trackingEnabled (bool, default: true) + Enable/Disable tracking + ~headPoseEnabled (bool, default: false) + Enable/Disable head pose + ~headBoundingBoxEnabled (bool, default: false) + Enable/Disable head bounding box + ~landmarksEnabled (bool, default: false) + Enable/Disable face landmarks + ~loadDb (bool, default: false) + Loads recognition database from file specified at (~dbPath) parameter (load as part of node initialization) + ~dbPath: 'package_path/db' + Path of recognition database + ~isTestMode: (bool, default: false) + Start node in test mode, publish FrameTest messages on person_tracking/person_tracking_output_test topic + +### realsense_ros_person_sample_nodelet - person sample node - controls person node and visualizes output + +#### Subscribed Topics + person_tracking/person_tracking_output_test (realsense_pt_msgs/FrameTest) + +#### Published Topics + none + 
+#### Services + none + +#### GUI commands + Start tracking: + Mouse wheel click + Register: + Mouse left button click + Recognize: + Ctrl+left mouse button click + +#### Parameters +IMPORTANT: these parameters will override parameters of the realsense_ros_person nodelet; +a configuration request will be sent to the realsense_ros_person nodelet + + ~recognitionEnabled (bool, default: false) + Enable/Disable recognition + ~sceletonEnabled (bool, default: false) + Enable/Disable skeleton + ~pointingGestureEnabled (bool, default: false) + Enable/Disable pointing gesture + ~waveGestureEnabled + Enable/Disable wave gesture + ~trackingEnabled (bool, default: true) + Enable/Disable tracking + ~headPoseEnabled (bool, default: false) + Enable/Disable head pose + ~headBoundingBoxEnabled (bool, default: false) + Enable/Disable head bounding box + ~landmarksEnabled (bool, default: false) + Enable/Disable face landmarks + +## Usage +The person package consists of 2 nodelets: + +1. realsense_ros_person nodelet is the Person API wrapper for ROS. +2. realsense_ros_person_sample_nodelet - demo for usage of Person ROS API - visualizes output of the realsense_ros_person nodelet, provides GUI for realsense_ros_person node control (Start tracking, Register/Recognize users) + +The person node by default starts in detection mode, and some features require tracking mode (e.g. skeleton). + +### For person tracking feature +```bash +$ roslaunch realsense_ros_person demo_person_tracking.launch +``` + +### For person gestures feature (pointing & wave) +```bash +$ roslaunch realsense_ros_person demo_person_gestures.launch +``` + +### For skeleton feature +```bash +$ roslaunch realsense_ros_person demo_person_skeleton.launch +``` + +### For person face features (recognition, landmarks, head pose, head bounding box) +```bash +$ roslaunch realsense_ros_person demo_person_face_features.launch + ``` +## Testing + +The person package can be tested with pre-recorded data using the provided ROS unit test. 
No physical camera needs to be present in order to run the test. The following steps can be used to build the unit test and download the pre-recorded ROS .bag data: + +```bash +$ cd ~/catkin_ws +$ catkin_make -DREALSENSE_ENABLE_TESTING=On +$ rostest realsense_ros_person person_detection.test +$ rostest realsense_ros_person recognition.test +$ rostest realsense_ros_person wave_detection.test +``` + +You will see the three tests each execute with command line output only, and then each test passes with a "RESULT: SUCCESS" status. + +**Note:** Depending on your internet connection speed, enabling 'REALSENSE_ENABLE_TESTING' can cause catkin_make to run for very long time (more than 5 minutes), as it downloads required pre-recorded .bag data. + +# Person features requirements for tracking mode +Part of person features works only at tracking/detection mode. + +|Feature name |Tracking mode | +|-----------------------|-----------------------| +|Tracking |Tracking | +|Skeleton |Tracking | +|Pointing gesture |Tracking | +|Wave gesture |Detection | +|Recognition |(either) | +|Landmarks |(either) | +|Head bounding box |(either) | +|Head pose |(either) | diff --git a/realsense_ros_person/launch/demo_person_face_features.launch b/realsense_ros_person/launch/demo_person_face_features.launch new file mode 100644 index 0000000..a061889 --- /dev/null +++ b/realsense_ros_person/launch/demo_person_face_features.launch @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/launch/demo_person_gestures.launch b/realsense_ros_person/launch/demo_person_gestures.launch new file mode 100644 index 0000000..ed20bb3 --- /dev/null +++ b/realsense_ros_person/launch/demo_person_gestures.launch @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/launch/demo_person_skeleton.launch b/realsense_ros_person/launch/demo_person_skeleton.launch new file mode 100644 index 0000000..2aea51e --- /dev/null +++ 
b/realsense_ros_person/launch/demo_person_skeleton.launch @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/launch/demo_person_tracking.launch b/realsense_ros_person/launch/demo_person_tracking.launch new file mode 100644 index 0000000..3f3589b --- /dev/null +++ b/realsense_ros_person/launch/demo_person_tracking.launch @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/launch/demo_person_tracking_from_bag.launch b/realsense_ros_person/launch/demo_person_tracking_from_bag.launch new file mode 100755 index 0000000..c33c359 --- /dev/null +++ b/realsense_ros_person/launch/demo_person_tracking_from_bag.launch @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/launch/person_tracking.launch b/realsense_ros_person/launch/person_tracking.launch new file mode 100644 index 0000000..a98e356 --- /dev/null +++ b/realsense_ros_person/launch/person_tracking.launch @@ -0,0 +1,6 @@ + + + + + + diff --git a/realsense_ros_person/launch/pt_params.yaml b/realsense_ros_person/launch/pt_params.yaml new file mode 100644 index 0000000..40a7e44 --- /dev/null +++ b/realsense_ros_person/launch/pt_params.yaml @@ -0,0 +1,23 @@ +#publisher type defines messages for person tracking node(RecognRsPersonTracker or Frame message) +# supported values: +# defaultPublisher - default message format - main message - "Frame" +publisherType: "defaultPublisher" + +#test mode, true - publish person tracking topic with color image +isTestMode: true + +pointingGestureEnabled: false +waveGestureEnabled: false +skeletonEnabled: false +recognitionEnabled: false +trackingEnabled: true +headPoseEnabled: false + +#load Data saved +loadDb: false +dbPath: '/home/elisa/testPT1.db' + +headPoseEnabled: false +headBoundingBoxEnabled: false +landmarksEnabled: false + diff --git a/realsense_ros_person/launch/record_bag_person.launch b/realsense_ros_person/launch/record_bag_person.launch new 
file mode 100644 index 0000000..069c305 --- /dev/null +++ b/realsense_ros_person/launch/record_bag_person.launch @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/msg/EulerAngles.msg b/realsense_ros_person/msg/EulerAngles.msg new file mode 100644 index 0000000..a5b3828 --- /dev/null +++ b/realsense_ros_person/msg/EulerAngles.msg @@ -0,0 +1,3 @@ +float32 pitch +float32 roll +float32 yaw diff --git a/realsense_ros_person/msg/EulerAnglesWithConfidence.msg b/realsense_ros_person/msg/EulerAnglesWithConfidence.msg new file mode 100644 index 0000000..41fd536 --- /dev/null +++ b/realsense_ros_person/msg/EulerAnglesWithConfidence.msg @@ -0,0 +1,2 @@ +EulerAngles angles +int32 confidence diff --git a/realsense_ros_person/msg/Frame.msg b/realsense_ros_person/msg/Frame.msg new file mode 100644 index 0000000..aaa019a --- /dev/null +++ b/realsense_ros_person/msg/Frame.msg @@ -0,0 +1,2 @@ +int32 numberOfUsers +User[] usersData diff --git a/realsense_ros_person/msg/FrameTest.msg b/realsense_ros_person/msg/FrameTest.msg new file mode 100644 index 0000000..094804e --- /dev/null +++ b/realsense_ros_person/msg/FrameTest.msg @@ -0,0 +1,2 @@ +sensor_msgs/Image colorImage +Frame frameData diff --git a/realsense_ros_person/msg/Gesture.msg b/realsense_ros_person/msg/Gesture.msg new file mode 100644 index 0000000..fa7c14c --- /dev/null +++ b/realsense_ros_person/msg/Gesture.msg @@ -0,0 +1,3 @@ +int32 type +geometry_msgs/Point32 vectorOrigin +geometry_msgs/Vector3 pointingVector \ No newline at end of file diff --git a/realsense_ros_person/msg/Gestures.msg b/realsense_ros_person/msg/Gestures.msg new file mode 100644 index 0000000..5c8af77 --- /dev/null +++ b/realsense_ros_person/msg/Gestures.msg @@ -0,0 +1,2 @@ +Pointing pointing +Wave wave diff --git a/realsense_ros_person/msg/Landmark.msg b/realsense_ros_person/msg/Landmark.msg new file mode 100644 index 0000000..5db1ff7 --- /dev/null +++ b/realsense_ros_person/msg/Landmark.msg @@ -0,0 +1,2 @@ 
+geometry_msgs/Point location +geometry_msgs/Point32 realWorldCoordinates diff --git a/realsense_ros_person/msg/LandmarksInfo.msg b/realsense_ros_person/msg/LandmarksInfo.msg new file mode 100644 index 0000000..ccdcc55 --- /dev/null +++ b/realsense_ros_person/msg/LandmarksInfo.msg @@ -0,0 +1,2 @@ +int32 confidence +Landmark[] landmarks diff --git a/realsense_ros_person/msg/PersonModuleState.msg b/realsense_ros_person/msg/PersonModuleState.msg new file mode 100644 index 0000000..494deff --- /dev/null +++ b/realsense_ros_person/msg/PersonModuleState.msg @@ -0,0 +1,14 @@ +#features enables/disabled states +bool isRecognitionEnabled +bool isSkeletonEnabled +bool isGesturesEnabled +bool isLandmarksEnabled +bool isHeadBoundingBoxEnabled +bool isHeadPoseEnabled +bool isTrackingEnabled + +#additional states +int32 trackingState + +int32 TRACKING_STATE_TRACKING=0 +int32 TRACKING_STATE_DETECTING=1 diff --git a/realsense_ros_person/msg/Pointing.msg b/realsense_ros_person/msg/Pointing.msg new file mode 100644 index 0000000..4211289 --- /dev/null +++ b/realsense_ros_person/msg/Pointing.msg @@ -0,0 +1,5 @@ +int32 confidence +geometry_msgs/Point32 originColor +geometry_msgs/Point32 originWorld +geometry_msgs/Point32 orientationColor +geometry_msgs/Vector3 orientationWorld diff --git a/realsense_ros_person/msg/RectWithConfidence.msg b/realsense_ros_person/msg/RectWithConfidence.msg new file mode 100644 index 0000000..3aacc77 --- /dev/null +++ b/realsense_ros_person/msg/RectWithConfidence.msg @@ -0,0 +1,2 @@ +int32 confidence +geometry_msgs/Point32[2] rectCorners diff --git a/realsense_ros_person/msg/SkeletonJoint.msg b/realsense_ros_person/msg/SkeletonJoint.msg new file mode 100644 index 0000000..6338e81 --- /dev/null +++ b/realsense_ros_person/msg/SkeletonJoint.msg @@ -0,0 +1,32 @@ +int32 type +float32 confidence +geometry_msgs/Point location +geometry_msgs/Point32 realWorldCoordinates + +#joint types constants +int32 JOINT_ANKLE_LEFT=0 +int32 JOINT_ANKLE_RIGHT=1 +int32 
JOINT_ELBOW_LEFT=2 +int32 JOINT_ELBOW_RIGHT=3 +int32 JOINT_FOOT_LEFT=4 +int32 JOINT_FOOT_RIGHT=5 +int32 JOINT_HAND_LEFT=6 +int32 JOINT_HAND_RIGHT=7 +int32 JOINT_HAND_TIP_LEFT=8 +int32 JOINT_HAND_TIP_RIGHT=9 +int32 JOINT_HEAD=10 +int32 JOINT_HIP_LEFT=11 +int32 JOINT_HIP_RIGHT=12 +int32 JOINT_KNEE_LEFT=13 +int32 JOINT_KNEE_RIGHT=14 +int32 JOINT_NECK=15 +int32 JOINT_SHOULDER_LEFT=16 +int32 JOINT_SHOULDER_RIGHT=17 +int32 JOINT_SPINE_BASE=18 +int32 JOINT_SPINE_MID=19 +int32 JOINT_SPINE_SHOULDER=20 +int32 JOINT_THUMB_LEFT=21 +int32 JOINT_THUMB_RIGHT=22 +int32 JOINT_WRIST_LEFT=23 +int32 JOINT_WRIST_RIGHT=24 +int32 JOINT_UNKNOWN=25 diff --git a/realsense_ros_person/msg/User.msg b/realsense_ros_person/msg/User.msg new file mode 100644 index 0000000..2093eb1 --- /dev/null +++ b/realsense_ros_person/msg/User.msg @@ -0,0 +1,10 @@ +UserInfo userInfo +geometry_msgs/Point32 centerOfMassImage +geometry_msgs/Point32 centerOfMassWorld +RectWithConfidence userRect +RectWithConfidence headBoundingBox +LandmarksInfo landmarksInfo +EulerAnglesWithConfidence headPose +SkeletonJoint[] skeletonJoints +Gestures gestures + diff --git a/realsense_ros_person/msg/UserInfo.msg b/realsense_ros_person/msg/UserInfo.msg new file mode 100644 index 0000000..d91d36b --- /dev/null +++ b/realsense_ros_person/msg/UserInfo.msg @@ -0,0 +1 @@ +int32 Id diff --git a/realsense_ros_person/msg/Wave.msg b/realsense_ros_person/msg/Wave.msg new file mode 100644 index 0000000..b672902 --- /dev/null +++ b/realsense_ros_person/msg/Wave.msg @@ -0,0 +1,9 @@ +int32 type + +#Wave gesture types +int32 WAVE_NOT_DETECTED=-1 # Wave not detected +int32 WAVE_LEFT_LA=1 # Wave ended with hand motion to the left, in the left area +int32 WAVE_RIGHT_LA=2 # Wave ended with hand motion to the right, in the left area +int32 WAVE_LEFT_RA=3 # Wave ended with hand motion to the left, in the right area +int32 WAVE_RIGHT_RA=4 # Wave ended with hand motion to the right, in the right area + diff --git 
a/realsense_ros_person/nodelet_plugins.xml b/realsense_ros_person/nodelet_plugins.xml new file mode 100644 index 0000000..2e29cc7 --- /dev/null +++ b/realsense_ros_person/nodelet_plugins.xml @@ -0,0 +1,8 @@ + + + + A nodelet of person tracking. + + + + diff --git a/realsense_ros_person/package.xml b/realsense_ros_person/package.xml new file mode 100644 index 0000000..f60806d --- /dev/null +++ b/realsense_ros_person/package.xml @@ -0,0 +1,37 @@ + + + realsense_ros_person + 0.8.2 + The realsense_ros_person package + Intel RealSense + Apache 2.0 + catkin + nodelet + roscpp + rostime + std_msgs + roslib + message_filters + image_transport + cv_bridge + roscpp_serialization + rospy + sensor_msgs + realsense_msgs + realsense_srvs + cv_bridge + image_transport + message_filters + roslib + nodelet + roscpp + std_msgs + rostime + realsense_camera + roscpp_serialization + rospy + sensor_msgs + + + + diff --git a/realsense_ros_person/src/apiwrapper/helpers/PersonTracking2RosHelper.h b/realsense_ros_person/src/apiwrapper/helpers/PersonTracking2RosHelper.h new file mode 100644 index 0000000..98279aa --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/helpers/PersonTracking2RosHelper.h @@ -0,0 +1,179 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once + +#include + +#include "realsense_ros_person/RecognitionResponse.h" +#include "realsense_ros_person/RecognitionRegisterResponse.h" +#include "realsense_ros_person/Wave.h" +#include "realsense_ros_person/RectWithConfidence.h" +#include "realsense_ros_person/SkeletonJoint.h" +#include "realsense_ros_person/PersonModuleState.h" + +namespace realsense_ros_person +{ +class PersonTracking2RosHelper +{ +public: + int RecognitionStatus2RosRecognitionStatus( + Intel::RealSense::PersonTracking::PersonTrackingData::PersonRecognition::RecognitionStatus ptRegistrationStatus) + { + using namespace Intel::RealSense::PersonTracking; + switch (ptRegistrationStatus) + { + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionPassedPersonRecognized: + return realsense_ros_person::RecognitionResponse::RECOGNITION_PASSED_PERSON_RECOGNIZED; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionPassedPersonNotRecognized: + return realsense_ros_person::RecognitionResponse::RECOGNITION_PASSED_PERSON_NOT_RECOGNIZED; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailed: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceNotDetected: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_NOT_DETECTED; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceNotClear: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_NOT_CLEAR; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedPersonTooFar: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_PERSON_TOO_FAR; + case PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedPersonTooClose: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_PERSON_TOO_CLOSE; + case 
PersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceAmbiguity: + return realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_AMBIGUITY; + default: + throw std::runtime_error("unsupported value"); + } + } + + int RegistrationStatus2RosRecognitionStatus( + Intel::RealSense::PersonTracking::PersonTrackingData::PersonRecognition::RegistrationStatus ptRegistrationStatus) + { + using namespace Intel::RealSense::PersonTracking; + switch (ptRegistrationStatus) + { + case PersonRecognition::RegistrationSuccessful: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_SUCCESSFULL; + case PersonRecognition::RegistrationFailed: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED; + case PersonRecognition::RegistrationFailedAlreadyRegistered: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_ALREADY_REGISTERED; + case PersonRecognition::RegistrationFailedFaceNotDetected: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_FACE_NOT_DETECTED; + case PersonRecognition::RegistrationFailedFaceNotClear: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_FACE_NOT_CLEAR; + case PersonRecognition::RegistrationFailedPersonTooFar: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_PERSON_TO_FAR; + case PersonRecognition::RegistrationFailedPersonTooClose: + return realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_PERSON_TO_CLOSE; + default: + throw std::runtime_error("unsupported value"); + } + } + + int WaveGesture2RosWaveGesture(Intel::RealSense::PersonTracking::PersonTrackingData::PersonGestures::WaveType waveType) + { + using namespace Intel::RealSense::PersonTracking; + switch (waveType) + { + case PersonTrackingData::PersonGestures::WaveType::WAVE_LEFT_LA: + return realsense_ros_person::Wave::WAVE_LEFT_LA; + case PersonTrackingData::PersonGestures::WaveType 
::WAVE_RIGHT_LA: + return realsense_ros_person::Wave::WAVE_RIGHT_LA; + case PersonTrackingData::PersonGestures::WaveType::WAVE_LEFT_RA: + return realsense_ros_person::Wave::WAVE_LEFT_RA; + case PersonTrackingData::PersonGestures::WaveType::WAVE_RIGHT_RA: + return realsense_ros_person::Wave::WAVE_RIGHT_RA; + case PersonTrackingData::PersonGestures::WaveType::WAVE_NOT_DETECTED: + return realsense_ros_person::Wave::WAVE_NOT_DETECTED; + default: + throw std::runtime_error("unsupported value"); + } + } + + realsense_ros_person::RectWithConfidence BoundingBox2D2RosRectWithConfidence( + Intel::RealSense::PersonTracking::PersonTrackingData::BoundingBox2D& boundingBox2D) + { + realsense_ros_person::RectWithConfidence rectWithConfidence; + rectWithConfidence.rectCorners[0].x = boundingBox2D.rect.x; + rectWithConfidence.rectCorners[0].y = boundingBox2D.rect.y; + rectWithConfidence.rectCorners[1].x = boundingBox2D.rect.x + boundingBox2D.rect.w; + rectWithConfidence.rectCorners[1].y = boundingBox2D.rect.y + boundingBox2D.rect.h; + rectWithConfidence.confidence = boundingBox2D.confidence; + return rectWithConfidence; + } + + int SkeletonJointType2RosSkeletonJointType(Intel::RealSense::PersonTracking::PersonJoints::JointType jointType) + { + using namespace Intel::RealSense::PersonTracking; + switch (jointType) + { + case PersonJoints::JointType::JOINT_ANKLE_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_ANKLE_LEFT; + case PersonJoints::JointType::JOINT_ANKLE_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_ANKLE_RIGHT; + case PersonJoints::JointType::JOINT_ELBOW_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_ELBOW_LEFT; + case PersonJoints::JointType::JOINT_ELBOW_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_ELBOW_RIGHT; + case PersonJoints::JointType::JOINT_FOOT_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_FOOT_LEFT; + case PersonJoints::JointType::JOINT_FOOT_RIGHT: + return 
realsense_ros_person::SkeletonJoint::JOINT_FOOT_RIGHT; + case PersonJoints::JointType::JOINT_HAND_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_HAND_LEFT; + case PersonJoints::JointType::JOINT_HAND_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_HAND_RIGHT; + case PersonJoints::JointType::JOINT_HAND_TIP_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_HAND_TIP_LEFT; + case PersonJoints::JointType::JOINT_HAND_TIP_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_HAND_TIP_RIGHT; + case PersonJoints::JointType::JOINT_HEAD: + return realsense_ros_person::SkeletonJoint::JOINT_HEAD; + case PersonJoints::JointType::JOINT_HIP_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_HIP_LEFT; + case PersonJoints::JointType::JOINT_HIP_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_HIP_RIGHT; + case PersonJoints::JointType::JOINT_KNEE_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_KNEE_LEFT; + case PersonJoints::JointType::JOINT_KNEE_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_KNEE_RIGHT; + case PersonJoints::JointType::JOINT_NECK: + return realsense_ros_person::SkeletonJoint::JOINT_NECK; + case PersonJoints::JointType::JOINT_SHOULDER_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_SHOULDER_LEFT; + case PersonJoints::JointType::JOINT_SHOULDER_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_SHOULDER_RIGHT; + case PersonJoints::JointType::JOINT_SPINE_BASE: + return realsense_ros_person::SkeletonJoint::JOINT_SPINE_BASE; + case PersonJoints::JointType::JOINT_SPINE_MID: + return realsense_ros_person::SkeletonJoint::JOINT_SPINE_MID; + case PersonJoints::JointType::JOINT_SPINE_SHOULDER: + return realsense_ros_person::SkeletonJoint::JOINT_SPINE_SHOULDER; + case PersonJoints::JointType::JOINT_THUMB_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_THUMB_LEFT; + case PersonJoints::JointType::JOINT_THUMB_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_THUMB_RIGHT; + 
case PersonJoints::JointType::JOINT_WRIST_LEFT: + return realsense_ros_person::SkeletonJoint::JOINT_WRIST_LEFT; + case PersonJoints::JointType::JOINT_WRIST_RIGHT: + return realsense_ros_person::SkeletonJoint::JOINT_WRIST_RIGHT; + default: + return realsense_ros_person::SkeletonJoint::JOINT_UNKNOWN; + } + } + + int TrackingState2RosTrackingState(Intel::RealSense::PersonTracking::TrackingState trackingState) + { + using namespace Intel::RealSense::PersonTracking; + switch (trackingState) + { + case TrackingState::TRACKING_STATE_TRACKING: + return realsense_ros_person::PersonModuleState::TRACKING_STATE_TRACKING; + case TrackingState::TRACKING_STATE_DETECTING: + return realsense_ros_person::PersonModuleState::TRACKING_STATE_DETECTING; + default: + throw std::runtime_error("unsupported value"); + } + } + +}; +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.cpp b/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.cpp new file mode 100644 index 0000000..d0d7640 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.cpp @@ -0,0 +1,187 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include "PersonTrackingHelper.h" + +#include + +namespace realsense_ros_person +{ +void ConfigurePersonTracking(PersonTrackingConfig config, + Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration) +{ + + if (config.recognitionEnabled) + { + ROS_INFO("recognition enabled"); + ptConfiguration->QueryRecognition()->Enable(); + } + else + { + ROS_INFO("recognition disabled"); + ptConfiguration->QueryRecognition()->Disable(); + } + + if (config.pointingGestureEnabled) + { + ROS_INFO("pointing gesture enabled"); + ptConfiguration->QueryGestures()->Enable(); + ptConfiguration->QueryGestures()->EnableGesture(Intel::RealSense::PersonTracking::GestureType::Pointing); + } + else + { + ROS_INFO("pointing gesture disabled"); + ptConfiguration->QueryGestures()->DisableGesture(Intel::RealSense::PersonTracking::GestureType::Pointing); + } + + if (config.waveGestureEnabled) + { + ROS_INFO("wave gesture enabled"); + ptConfiguration->QueryGestures()->Enable(); + ptConfiguration->QueryGestures()->EnableGesture(Intel::RealSense::PersonTracking::GestureType::Wave); + } + else + { + ROS_INFO("wave gesture disabled"); + ptConfiguration->QueryGestures()->DisableGesture(Intel::RealSense::PersonTracking::GestureType::Wave); + } + + if (config.skeletonEnabled) + { + ROS_INFO("skeleton enabled"); + ptConfiguration->QuerySkeletonJoints()->Enable(); + } + else + { + ROS_INFO("skeleton disabled"); + ptConfiguration->QuerySkeletonJoints()->Disable(); + } + + if (config.trackingEnabled) + { + ROS_INFO("tracking enabled"); + ptConfiguration->QueryTracking()->Enable(); + } + else + { + ROS_INFO("tracking disabled"); + ptConfiguration->QueryTracking()->Disable(); + } + + if (config.headPoseEnabled) + { + ROS_INFO("head pose enabled"); + ptConfiguration->QueryTracking()->Enable(); + ptConfiguration->QueryFace()->EnableHeadPose(); + } + else + { + ROS_INFO("head pose disabled"); + ptConfiguration->QueryFace()->DisableHeadPose(); + } + + + if 
(config.headBoundingBoxEnabled) + { + ROS_INFO("head bounding box enabled"); + ptConfiguration->QueryTracking()->Enable(); + ptConfiguration->QueryTracking()->EnableHeadBoundingBox(); + } + else + { + ROS_INFO("head bounding box disabled"); + ptConfiguration->QueryTracking()->DisableHeadBoundingBox(); + } + + + if (config.landmarksEnabled) + { + ROS_INFO("landmarks enabled"); + ptConfiguration->QueryTracking()->Enable(); + ptConfiguration->QueryFace()->EnableFaceLandmarks(); + } + else + { + ROS_INFO("landmarks disabled"); + ptConfiguration->QueryFace()->DisableFaceLandmarks(); + } +} + +bool +LoadDatabase(std::string dbPath, Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration) +{ + ROS_INFO_STREAM("loading database from: " << dbPath); + auto database = ptConfiguration->QueryRecognition()->QueryDatabase(); + auto dbUtils = ptConfiguration->QueryRecognition()->QueryDatabaseUtilities(); + + std::filebuf fb; + if (!fb.open(dbPath, std::ios::in | std::ios::binary)) + { + throw std::runtime_error("failed to open database file"); + } + std::istream is(&fb); + is.seekg(0, std::ios::end); + int size = is.tellg(); + is.seekg(0, std::ios::beg); + std::vector buffer(size); + is.read(buffer.data(), buffer.size()); + fb.close(); + if (!dbUtils->DeserializeDatabase(database, (unsigned char *) buffer.data(), (int) buffer.size())) + { + ROS_ERROR("failed to load database"); + return false; + } + return true; +} + +bool +SaveDatabase(std::string dbPath, Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration) +{ + ROS_INFO_STREAM("save database to: " << dbPath); + auto db = ptConfiguration->QueryRecognition()->QueryDatabase(); + auto dbUtils = ptConfiguration->QueryRecognition()->QueryDatabaseUtilities(); + + const size_t dbSize = dbUtils->GetDatabaseMemorySize(db); + + std::vector buffer(dbSize, 0); + int32_t writtenSize; + dbUtils->SerializeDatabase(db, buffer.data(), (int32_t) buffer.size(), &writtenSize); + + std::ofstream 
out(dbPath, std::ios::binary); + if (out) + { + out.write((char *) buffer.data(), buffer.size()); + } + else + { + ROS_ERROR("fail to save database"); + return false; + } + return true; +} + +std::string RecognitionStatusToString( + Intel::RealSense::PersonTracking::PersonTrackingData::PersonRecognition::RecognitionStatus status) +{ + using Intel::RealSense::PersonTracking::PersonTrackingData; + switch (status) + { + case PersonTrackingData::PersonRecognition::RecognitionPassedPersonRecognized: + return "RecognitionPassedPersonRecognized"; + case PersonTrackingData::PersonRecognition::RecognitionPassedPersonNotRecognized: + return "RecognitionPassedPersonNotRecognized"; + case PersonTrackingData::PersonRecognition::RecognitionFailed: + return "RecognitionFailed"; + case PersonTrackingData::PersonRecognition::RecognitionFailedFaceNotDetected: + return "RecognitionFailedFaceNotDetected"; + case PersonTrackingData::PersonRecognition::RecognitionFailedFaceNotClear: + return "RecognitionFailedFaceNotClear"; + case PersonTrackingData::PersonRecognition::RecognitionFailedPersonTooFar: + return "RecognitionFailedPersonTooFar"; + case PersonTrackingData::PersonRecognition::RecognitionFailedPersonTooClose: + return "RecognitionFailedPersonTooClose"; + default: + return "unknown recognition status please check the code"; + } +} +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.h b/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.h new file mode 100644 index 0000000..9038ccc --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/helpers/PersonTrackingHelper.h @@ -0,0 +1,46 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once + +#include +#include +#include "RealSense/PersonTracking/PersonTrackingConfiguration.h" + +namespace realsense_ros_person +{ +struct PersonTrackingConfig +{ + bool pointingGestureEnabled; + bool waveGestureEnabled; + bool skeletonEnabled; + bool recognitionEnabled; + bool trackingEnabled; + bool headPoseEnabled; + bool headBoundingBoxEnabled; + bool landmarksEnabled; + +public: + PersonTrackingConfig() : + pointingGestureEnabled(false), + waveGestureEnabled(false), + skeletonEnabled(false), + recognitionEnabled(false), + trackingEnabled(false), + headPoseEnabled(false), + headBoundingBoxEnabled(false), + landmarksEnabled(false) {} +}; + +//parse command line arguments and configure person tracking +void ConfigurePersonTracking(PersonTrackingConfig config, + Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration); + +//load person tracking recognition database from file +bool +LoadDatabase(std::string dbPath, Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration); + +//save person tracking recognition database from file +bool +SaveDatabase(std::string dbPath, Intel::RealSense::PersonTracking::PersonTrackingConfiguration *ptConfiguration); +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.cpp b/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.cpp new file mode 100644 index 0000000..ceb390b --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.cpp @@ -0,0 +1,149 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include +#include +#include +#include "Ros2RealsenseSdkConverter.h" + +namespace realsense_ros_person +{ +rs::core::image_interface* Ros2RealSenseSdkConverter::CreateSdkImage(rs::core::stream_type streamType, const sensor_msgs::ImageConstPtr &imageRos) +{ + int width = imageRos->width; + int height = imageRos->height; + rs::core::pixel_format pixelFormat = RosImageFormat2RealSenseSdkFormat(imageRos->encoding); + rs::core::image_info info = + { + width, + height, + pixelFormat, + static_cast(imageRos->step) + }; + + return rs::core::image_interface::create_instance_from_raw_data( + &info, + imageRos->data.data(), + streamType, + rs::core::image_interface::flag::any, + 0, + 0); +} + +rs::core::pixel_format Ros2RealSenseSdkConverter::RosImageFormat2RealSenseSdkFormat(std::string rosImageFormat) +{ + if (rosImageFormat.compare("mono8") == 0) + { + return rs::core::pixel_format::raw8; + } + if (rosImageFormat.compare("16UC1") == 0) + { + return rs::core::pixel_format::z16; + } + else if (rosImageFormat.compare("bgr8") == 0) + { + return rs::core::pixel_format::bgr8; + } + else if (rosImageFormat.compare("bgra8") == 0) + { + return rs::core::pixel_format::bgra8; + } + else if (rosImageFormat.compare("rgb8") == 0) + { + return rs::core::pixel_format::rgb8; + } + else if (rosImageFormat.compare("rgba8") == 0) + { + return rs::core::pixel_format::rgba8; + } + ROS_ERROR_STREAM("Unsupported ROS image format: " << rosImageFormat); + throw std::runtime_error(std::string("Unsupported ROS image format")); +} + +rs::core::correlated_sample_set Ros2RealSenseSdkConverter::CreateSdkSampleSet( + const sensor_msgs::ImageConstPtr &colorImageRos, + const sensor_msgs::ImageConstPtr &depthImageRos) +{ + rs::core::correlated_sample_set sampleSet; + sampleSet[rs::core::stream_type::color] = CreateSdkImage(rs::core::stream_type::color, colorImageRos); + sampleSet[rs::core::stream_type::depth] = CreateSdkImage(rs::core::stream_type::color, depthImageRos); + return 
sampleSet; +} + +void Ros2RealSenseSdkConverter::ReleaseSampleSet(rs::core::correlated_sample_set &sampleSet) +{ + //release images + for (uint32_t i = 0; i < static_cast(rs::core::stream_type::max); ++i) + { + rs::core::image_interface* image = sampleSet.images[i]; + if (image) + { + image->release(); + } + } +} + +rs::core::video_module_interface::actual_module_config +Ros2RealSenseSdkConverter::CreateSdkModuleConfig(const sensor_msgs::CameraInfoConstPtr &colorCameraInfo, + const sensor_msgs::CameraInfoConstPtr &depthCameraInfo) +{ + + rs::core::intrinsics depthIntrinsics, colorIntrinsics; + rs::core::extrinsics extrinsics; + + colorIntrinsics = CreateSdkIntrinsics(colorCameraInfo); + depthIntrinsics = CreateSdkIntrinsics(depthCameraInfo); + std::copy(depthCameraInfo->R.begin(), depthCameraInfo->R.end(), extrinsics.rotation); + extrinsics.translation[0] = depthCameraInfo->P[3]; + extrinsics.translation[1] = depthCameraInfo->P[7]; + extrinsics.translation[2] = depthCameraInfo->P[11]; + + + rs::core::video_module_interface::actual_module_config actualModuleConfig = {}; + std::vector possible_streams = {rs::core::stream_type::depth, + rs::core::stream_type::color //person tracking uses only color & depth + }; + std::map intrinsics; + intrinsics[rs::core::stream_type::depth] = depthIntrinsics; + intrinsics[rs::core::stream_type::color] = colorIntrinsics; + + //fill stream configurations + for (auto &stream : possible_streams) + { + rs::core::video_module_interface::actual_image_stream_config &actualStreamConfig = actualModuleConfig[stream]; + actualStreamConfig.size.width = intrinsics[stream].width; + actualStreamConfig.size.height = intrinsics[stream].height; + actualStreamConfig.frame_rate = 30; + actualStreamConfig.intrinsics = intrinsics[stream]; + actualStreamConfig.extrinsics = extrinsics; + actualStreamConfig.is_enabled = true; + } + + //create projection object + actualModuleConfig.projection = rs::core::projection_interface::create_instance(&colorIntrinsics, 
&depthIntrinsics, &extrinsics); + return actualModuleConfig; +} + +rs::core::intrinsics +Ros2RealSenseSdkConverter::CreateSdkIntrinsics(const sensor_msgs::CameraInfoConstPtr &rosCameraInfo) +{ + rs::core::intrinsics intrinsics; + intrinsics.width = rosCameraInfo->width; + intrinsics.height = rosCameraInfo->height; + intrinsics.fx = rosCameraInfo->K[0]; + intrinsics.fy = rosCameraInfo->K[4]; + intrinsics.ppx = rosCameraInfo->K[2]; + intrinsics.ppy = rosCameraInfo->K[5]; + std::copy(rosCameraInfo->D.begin(), rosCameraInfo->D.end(), intrinsics.coeffs); + std::string distortion_model = rosCameraInfo->distortion_model; + if (distortion_model.compare("Modified_Brown-Conrady") == 0) + { + intrinsics.model = rs::core::distortion_type::modified_brown_conrady; + } + else + { + intrinsics.model = rs::core::distortion_type::none; + } + return intrinsics; +} +} diff --git a/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.h b/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.h new file mode 100644 index 0000000..ca07495 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/helpers/Ros2RealsenseSdkConverter.h @@ -0,0 +1,65 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once + +#include +#include + +#include +#include + +namespace realsense_ros_person +{ +class Ros2RealSenseSdkConverter +{ +public: + /** + * @brief Create Realsense SDK image from ROS image + * @param imageRos ros image + * @return RealSense sdk image, IMPORTANT shalow copy - image memory managed by ImageConstPtr + */ + rs::core::image_interface* CreateSdkImage(rs::core::stream_type streamType, const sensor_msgs::ImageConstPtr &imageRos); + + /** + * @brief Create RealSense SDK sample set from ROS images + * @param colorImageRos color image + * @param depthImageRos depth image + * @return RealSense SDK sample set, IMPORTANT images memory managed by ImageConstPtr & ImageConstPtr, + * sample set should be release with ReleaseSampleSet method + */ + rs::core::correlated_sample_set CreateSdkSampleSet(const sensor_msgs::ImageConstPtr& colorImageRos, + const sensor_msgs::ImageConstPtr& depthImageRos); + + /** + * @brief Release RealSense SDK sample set - release image objects + * @param sampleSet RealSense SDK sample set + */ + void ReleaseSampleSet(rs::core::correlated_sample_set& sampleSet); + + /** + * @brief Create RealSense SDK module config from ROS camera parameters + * @param colorCameraInfo color camera info + * @param depthCameraInfo depth camera info + * @return RealSense SDK module config + */ + rs::core::video_module_interface::actual_module_config CreateSdkModuleConfig(const sensor_msgs::CameraInfoConstPtr& colorCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo); + + /** + * @brief Convert ROS image format(e.g. 
rgba8) to RealSense SDK format + * @param rosImageFormat + * @return RealSense SDK pixel format + */ + rs::core::pixel_format RosImageFormat2RealSenseSdkFormat(std::string rosImageFormat); + + /** + * @brief Create RealSense SDK intrinsics from ROS camera info + * @param rosCameraInfo + * @return RealSense SDK intrinsics + */ + rs::core::intrinsics CreateSdkIntrinsics(const sensor_msgs::CameraInfoConstPtr& rosCameraInfo); +}; +} + + diff --git a/realsense_ros_person/src/apiwrapper/publisher/AbsPersonTrackingPublisher.h b/realsense_ros_person/src/apiwrapper/publisher/AbsPersonTrackingPublisher.h new file mode 100644 index 0000000..907e23f --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/publisher/AbsPersonTrackingPublisher.h @@ -0,0 +1,33 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once +#include "ros/ros.h" +#include "person_tracking_video_module_factory.h" +#include +#include "realsense_ros_person/User.h" +#include "realsense_ros_person/Frame.h" + +namespace realsense_ros_person +{ +class AbsPersonTrackingPublisher +{ +public: + AbsPersonTrackingPublisher() = default; + + virtual void publishOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) = 0; + + virtual void publishTestOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData, + const sensor_msgs::ImageConstPtr &colorImage) = 0; + + virtual void onInit(ros::NodeHandle &nodeHandle) = 0; + + virtual ~AbsPersonTrackingPublisher() = default; + +protected: + ros::Publisher mPublisher; + ros::Publisher mTestPublisher; +}; +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.cpp 
b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.cpp new file mode 100644 index 0000000..7e067c1 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.cpp @@ -0,0 +1,62 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "PersonTrackingDefaultPublisher.h" + +#include "realsense_ros_person/FrameTest.h" +#include "realsense_ros_person/PersonModuleState.h" + +namespace realsense_ros_person +{ +void PersonTrackingDefaultPublisher::onInit(ros::NodeHandle &nodeHandle) +{ + std::string personTrackingTopic = "person_tracking_output"; + std::string personTrackingTestTopic = "person_tracking_output_test"; + std::string personTrackingModuleStateTopic = "/person_tracking/module_state"; + ROS_INFO_STREAM("Publishing to " << personTrackingTopic); + mPublisher = nodeHandle.advertise(personTrackingTopic, 1); + mTestPublisher = nodeHandle.advertise(personTrackingTestTopic, + 1); + mPersonModuleStatePublisher = nodeHandle.advertise(personTrackingModuleStateTopic, 1); +} + +void PersonTrackingDefaultPublisher::publishOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) +{ + + realsense_ros_person::Frame frame; + FillFrameData(frame, ptConfiguration, trackingData); + mPublisher.publish(frame); + mPersonModuleStatePublisher.publish(mPtPublisherHelper.BuildPersonModuleState(ptConfiguration, trackingData)); +} + +void PersonTrackingDefaultPublisher::publishTestOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData, + const sensor_msgs::ImageConstPtr &colorImage) +{ + realsense_ros_person::FrameTest frameTest; + //fill regular person tracking data + FillFrameData(frameTest.frameData, ptConfiguration, trackingData); + //fill 
color image + frameTest.colorImage.height = colorImage->height; + frameTest.colorImage.width = colorImage->width; + frameTest.colorImage.encoding = colorImage->encoding; + frameTest.colorImage.step = colorImage->step; + frameTest.colorImage.data = colorImage->data; + + mTestPublisher.publish(frameTest); +} + +void PersonTrackingDefaultPublisher::FillFrameData(realsense_ros_person::Frame &frame, + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) +{ + using Intel::RealSense::PersonTracking::PersonTrackingData; + frame.numberOfUsers = trackingData.QueryNumberOfPeople(); + std::vector usersVector = mPtPublisherHelper.BuildUsersVector(ptConfiguration, + trackingData); + std::copy(usersVector.begin(), usersVector.end(), std::back_inserter(frame.usersData)); +} +} diff --git a/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.h b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.h new file mode 100644 index 0000000..515e9ff --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingDefaultPublisher.h @@ -0,0 +1,36 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once + +#include + +#include "AbsPersonTrackingPublisher.h" +#include "PersonTrackingPublisherHelper.h" + +namespace realsense_ros_person +{ +class PersonTrackingDefaultPublisher : public AbsPersonTrackingPublisher +{ +public: + PersonTrackingDefaultPublisher() = default; + + virtual void publishOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) override; + + virtual void publishTestOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData, + const sensor_msgs::ImageConstPtr &colorImage) override; + + virtual void onInit(ros::NodeHandle &nodeHandle) override; + + virtual ~PersonTrackingDefaultPublisher() = default; + +protected: + PersonTrackingPublisherHelper mPtPublisherHelper; + ros::Publisher mPersonModuleStatePublisher; + void FillFrameData(realsense_ros_person::Frame &frame, + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData); +}; +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.cpp b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.cpp new file mode 100644 index 0000000..c0c5de4 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.cpp @@ -0,0 +1,214 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include + +#include "PersonTrackingPublisherHelper.h" +#include "realsense_ros_person/SkeletonJoint.h" +#include "PersonTrackingHelper.h" + +namespace realsense_ros_person +{ +void PersonTrackingPublisherHelper::addSkeletonToOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *personData, realsense_ros_person::User &user) +{ + using Intel::RealSense::PersonTracking::PersonTrackingData; + if (!ptConfiguration.QuerySkeletonJoints()->IsEnabled()) return; + + PersonTrackingData::PersonJoints *personJoints = personData->QuerySkeletonJoints(); + int numberOfJoints = personJoints->QueryNumJoints(); + + if (numberOfJoints == 0) return; + + std::vector skeletonPoints(numberOfJoints); + if (!personJoints->QueryJoints(skeletonPoints.data())) + { + return; + } + + + for (PersonTrackingData::PersonJoints::SkeletonPoint &skeletonPoint : skeletonPoints) + { + realsense_ros_person::SkeletonJoint joint; + joint.type = mPt2rosHelper.SkeletonJointType2RosSkeletonJointType(skeletonPoint.jointType); + joint.confidence = skeletonPoint.confidenceImage; + joint.location.x = skeletonPoint.image.x; + joint.location.y = skeletonPoint.image.y; + joint.realWorldCoordinates.x = skeletonPoint.world.x; + joint.realWorldCoordinates.y = skeletonPoint.world.y; + joint.realWorldCoordinates.z = skeletonPoint.world.z; + user.skeletonJoints.push_back(joint); + } +} + +void PersonTrackingPublisherHelper::addGesturesToOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user) +{ + using Intel::RealSense::PersonTracking::PersonTrackingData; + + user.gestures.pointing.confidence = 0; + user.gestures.wave.type = mPt2rosHelper.WaveGesture2RosWaveGesture(PersonTrackingData::PersonGestures::WaveType::WAVE_NOT_DETECTED); + + if 
(!ptConfiguration.QueryGestures()->IsEnabled()) return; + + PersonTrackingData::PersonGestures *personGestures = personData->QueryGestures(); + + if (personGestures == nullptr) + { + ROS_WARN("personGestures == nullptr"); + return; + } + + if (personGestures->IsPointing()) + { + PersonTrackingData::PersonGestures::PointingInfo poitingInfo = personGestures->QueryPointingInfo(); + user.gestures.pointing.originColor.x = poitingInfo.colorPointingData.origin.x; + user.gestures.pointing.originColor.y = poitingInfo.colorPointingData.origin.y; + user.gestures.pointing.originWorld.x = poitingInfo.worldPointingData.origin.x; + user.gestures.pointing.originWorld.y = poitingInfo.worldPointingData.origin.y; + user.gestures.pointing.originWorld.z = poitingInfo.worldPointingData.origin.z; + user.gestures.pointing.orientationColor.x = poitingInfo.colorPointingData.direction.x; + user.gestures.pointing.orientationColor.y = poitingInfo.colorPointingData.direction.y; + user.gestures.pointing.orientationWorld.x = poitingInfo.worldPointingData.direction.x; + user.gestures.pointing.orientationWorld.y = poitingInfo.worldPointingData.direction.y; + user.gestures.pointing.orientationWorld.z = poitingInfo.worldPointingData.direction.z; + user.gestures.pointing.confidence = poitingInfo.confidence; + } + + user.gestures.wave.type = mPt2rosHelper.WaveGesture2RosWaveGesture(personGestures->QueryWaveGesture()); +} + +std::vector PersonTrackingPublisherHelper::BuildUsersVector( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) +{ + using Intel::RealSense::PersonTracking::PersonTrackingData; + std::vector usersVector; + int numberOfUsers = trackingData.QueryNumberOfPeople(); + + for (int index = 0; index < numberOfUsers; ++index) + { + PersonTrackingData::Person *personData = trackingData.QueryPersonData( + PersonTrackingData::AccessOrderType::ACCESS_ORDER_BY_INDEX, index); + + if (personData) + { 
+ realsense_ros_person::User user; + + PersonTrackingData::PersonTracking *personTrackingData = personData->QueryTracking(); + PersonTrackingData::BoundingBox2D box = personTrackingData->Query2DBoundingBox(); + PersonTrackingData::BoundingBox2D headBoundingBox = personTrackingData->QueryHeadBoundingBox(); + + PersonTrackingData::PointCombined centerMass = personTrackingData->QueryCenterMass(); + + user.userInfo.Id = personTrackingData->QueryId(); + + user.userRect = mPt2rosHelper.BoundingBox2D2RosRectWithConfidence(box); + user.headBoundingBox = mPt2rosHelper.BoundingBox2D2RosRectWithConfidence(headBoundingBox); + + user.centerOfMassImage.x = centerMass.image.point.x; + user.centerOfMassImage.y = centerMass.image.point.y; + user.centerOfMassImage.z = centerMass.image.point.z; + + user.centerOfMassWorld.x = centerMass.world.point.x; + user.centerOfMassWorld.y = centerMass.world.point.y; + user.centerOfMassWorld.z = centerMass.world.point.z; + + addSkeletonToOutput(ptConfiguration, personData, user); + addGesturesToOutput(ptConfiguration, personData, user); + addLandmarksToOutput(ptConfiguration, personData, user); + addHeadPoseToOutput(ptConfiguration, personData, user); + usersVector.push_back(user); + } + } + return usersVector; +} + +void PersonTrackingPublisherHelper::addLandmarksToOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user) +{ + if (!ptConfiguration.QueryFace()->IsFaceLandmarksEnabled()) + { + return; + } + + Intel::RealSense::PersonTracking::PersonTrackingData::PersonFace *faceData = personData->QueryFace(); + if (faceData == nullptr) + { + return; + } + + Intel::RealSense::PersonTracking::PersonTrackingData::PersonFace::LandmarksInfo* landmarksInfo = faceData->QueryLandmarks(); + if (landmarksInfo == nullptr) + { + return; + } + if (landmarksInfo->landmarks == nullptr) + { + return; + } + + int32_t 
numLandmarks = landmarksInfo->numLandmarks; + user.landmarksInfo.confidence = landmarksInfo->confidence; + if (landmarksInfo->confidence > 0) + { + for (int landmarkNb = 0; landmarkNb < numLandmarks; ++landmarkNb) + { + realsense_ros_person::Landmark landmark; + landmark.location.x = landmarksInfo->landmarks[landmarkNb].image.x; + landmark.location.y = landmarksInfo->landmarks[landmarkNb].image.y; + landmark.realWorldCoordinates.x = landmarksInfo->landmarks[landmarkNb].world.x; + landmark.realWorldCoordinates.y = landmarksInfo->landmarks[landmarkNb].world.y; + landmark.realWorldCoordinates.z = landmarksInfo->landmarks[landmarkNb].world.z; + user.landmarksInfo.landmarks.push_back(landmark); + } + } +} + +void PersonTrackingPublisherHelper::addHeadPoseToOutput( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user) +{ + + Intel::RealSense::PersonTracking::PersonTrackingData::PersonFace *faceData = personData->QueryFace(); + if (faceData == nullptr) + { + user.headPose.confidence = 0; + return; + } + Intel::RealSense::PersonTracking::PersonTrackingData::PoseEulerAngles angles; + if (faceData->QueryHeadPose(angles)) + { + user.headPose.angles.yaw = angles.yaw; + user.headPose.angles.pitch = angles.pitch; + user.headPose.angles.roll = angles.roll; + user.headPose.confidence = 100; + } + else + { + user.headPose.confidence = 0; + } + return; +} + +realsense_ros_person::PersonModuleState PersonTrackingPublisherHelper::BuildPersonModuleState( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData) +{ + realsense_ros_person::PersonModuleState state; + state.isRecognitionEnabled = ptConfiguration.QueryRecognition()->IsEnabled(); + state.isSkeletonEnabled = ptConfiguration.QuerySkeletonJoints()->IsEnabled(); + state.isGesturesEnabled = 
ptConfiguration.QueryGestures()->IsEnabled(); + state.isLandmarksEnabled = ptConfiguration.QueryFace()->IsFaceLandmarksEnabled(); + state.isHeadBoundingBoxEnabled = ptConfiguration.QueryTracking()->IsHeadBoundingBoxEnabled(); + state.isTrackingEnabled = ptConfiguration.QueryTracking()->IsEnabled(); + state.trackingState = mPt2rosHelper.TrackingState2RosTrackingState(trackingData.GetTrackingState()); + return state; +} +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.h b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.h new file mode 100644 index 0000000..bfb73be --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/publisher/PersonTrackingPublisherHelper.h @@ -0,0 +1,51 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once + +#include +#include "RealSense/PersonTracking/PersonTrackingConfiguration.h" +#include "RealSense/PersonTracking/PersonTrackingData.h" + +#include "realsense_ros_person/User.h" +#include "realsense_ros_person/PersonModuleState.h" + +namespace realsense_ros_person +{ +class PersonTrackingPublisherHelper +{ +public: + PersonTrackingPublisherHelper() = default; + + ~PersonTrackingPublisherHelper() = default; + + virtual std::vector + BuildUsersVector(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData); + + virtual void addSkeletonToOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user); + + virtual void addGesturesToOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user); + 
+ virtual void + addLandmarksToOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user); + + virtual void + addHeadPoseToOutput(Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData::Person *const personData, + realsense_ros_person::User &user); + + virtual + realsense_ros_person::PersonModuleState BuildPersonModuleState( + Intel::RealSense::PersonTracking::PersonTrackingConfiguration &ptConfiguration, + Intel::RealSense::PersonTracking::PersonTrackingData &trackingData); + + PersonTracking2RosHelper mPt2rosHelper; +}; +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.cpp b/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.cpp new file mode 100644 index 0000000..356f1b2 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.cpp @@ -0,0 +1,152 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include "realsense_ros_person_nodelet.h" + +PLUGINLIB_EXPORT_CLASS(realsense_ros_person::TNodeletPt, nodelet::Nodelet) + +namespace realsense_ros_person +{ +const std::string TNodeletPt::DEFAULT_PUBLISHER = "defaultPublisher"; + +TNodeletPt::TNodeletPt() : + m_ros2realsense(new realsense_ros_person::Ros2RealSenseSdkConverter()), + m_isTestMode(false) +{ +} + +void TNodeletPt::onInit() +{ + setParams(config); + NODELET_INFO("start nodelet param setted"); + ROS_INFO_STREAM("enable" << config.recognitionEnabled); + + std::string path = ros::package::getPath("realsense_ros_person"); + + /***************create and configure person tracker video module**********************/ + ptModule.reset(rs::person_tracking::person_tracking_video_module_factory::create_person_tracking_video_module()); + + //configure person tracking - enable requested features + ConfigurePersonTracking(config, ptModule.get()->QueryConfiguration()); + //configure person tracking - load recognition database if needed + bool loadDb = false; + getPrivateNodeHandle().param("loadDb", loadDb, false); + if (loadDb) + { + std::string dbPath; + getPrivateNodeHandle().param("dbPath", dbPath, path + "/db"); //in case of loadDb + LoadDatabase(dbPath, ptModule->QueryConfiguration()); + } + + // Subcribe to camera info + std::string colorCameraInfoStream = "camera/color/camera_info"; + std::string depthCameraInfoStream = "camera/depth/camera_info"; + + mColorCameraInfoSubscriber = std::shared_ptr>(new message_filters::Subscriber(nh, colorCameraInfoStream, 1)); + mDepthCameraInfoSubscriber = std::shared_ptr>(new message_filters::Subscriber(nh, depthCameraInfoStream, 1)); + + mTimeSynchronizer_info = std::shared_ptr>(new message_filters::TimeSynchronizer(*mColorCameraInfoSubscriber, *mDepthCameraInfoSubscriber, 40)); + mTimeSynchronizer_info->registerCallback(boost::bind(&TNodeletPt::cameraInfoCallback, this, _1, _2)); + + getPrivateNodeHandle().param("isTestMode", m_isTestMode, false); + if 
(m_isTestMode) + { + NODELET_INFO("test mode publish person tracking test topic(includes color image)"); + } + //configure and init publisher + std::string publisherType; + getPrivateNodeHandle().param("publisherType", publisherType, "123"); + if (publisherType.compare(DEFAULT_PUBLISHER) == 0) + { + mPublisher.reset(new PersonTrackingDefaultPublisher()); + NODELET_INFO("using default publisher"); + } + else + { + NODELET_ERROR("unsupported publisher: %s", publisherType.c_str()); + throw std::runtime_error("unsupported publisher"); + } + mPublisher->onInit(nh); + //init server + mServer.onInit(nh, ptModule.get(), config); + + NODELET_INFO("end OnInit"); + +} //end OnInit + +void TNodeletPt::setParams(PersonTrackingConfig& config) +{ + ros::NodeHandle & nodeHandle = getPrivateNodeHandle(); + std::string path = ros::package::getPath("realsense_ros_person"); + NODELET_INFO("setParams"); + nodeHandle.param("recognitionEnabled", config.recognitionEnabled, false); + nodeHandle.param("pointingGestureEnabled", config.pointingGestureEnabled, false); + nodeHandle.param("waveGestureEnabled", config.waveGestureEnabled, false); + + nodeHandle.param("trackingEnabled", config.trackingEnabled, true); + nodeHandle.param("skeletonEnabled", config.skeletonEnabled, false); + + nodeHandle.param("headPoseEnabled", config.headPoseEnabled, false); + nodeHandle.param("headBoundingBoxEnabled", config.headBoundingBoxEnabled, false); + nodeHandle.param("landmarksEnabled", config.landmarksEnabled, false); +} +/////////////////////////////////////////////////////////////////////////////// +void TNodeletPt::cameraInfoCallback(const sensor_msgs::CameraInfoConstPtr& colorCameraInfo, const sensor_msgs::CameraInfoConstPtr& depthCameraInfo) +{ + NODELET_INFO("cameraInfoCallback"); + rs::core::video_module_interface::actual_module_config config = m_ros2realsense->CreateSdkModuleConfig(colorCameraInfo, depthCameraInfo); + if (ptModule == nullptr || config.projection == nullptr) + { + 
NODELET_ERROR_STREAM("person tracking module or projection is null"); + } + if (ptModule->set_module_config(config) != rs::core::status_no_error) + { + NODELET_ERROR_STREAM("error : failed to set the enabled module configuration"); + return; + } + mColorCameraInfoSubscriber->unsubscribe(); + mDepthCameraInfoSubscriber->unsubscribe(); + seq = 0; + subscribeFrameMessages(); +} + +void TNodeletPt::subscribeFrameMessages() +{ + std::string depthImageStream = "camera/depth/image_raw"; + std::string colorImageStream = "camera/color/image_raw"; + NODELET_INFO_STREAM("Listening on " << depthImageStream); + NODELET_INFO_STREAM("Listening on " << colorImageStream); + // Subscribe to color, point cloud and depth using synchronization filter + mDepthSubscriber = std::shared_ptr> + (new message_filters::Subscriber(nh, depthImageStream, 1)); + mColorSubscriber = std::shared_ptr> + (new message_filters::Subscriber(nh, colorImageStream, 1)); + + mTimeSynchronizer = std::shared_ptr> + (new message_filters::TimeSynchronizer(*mDepthSubscriber, *mColorSubscriber, 40)); + mTimeSynchronizer->registerCallback(boost::bind(&TNodeletPt::personTrackingCallback, this, _1, _2)); + +} + +void TNodeletPt::personTrackingCallback(const sensor_msgs::ImageConstPtr& depthImageMsg, const sensor_msgs::ImageConstPtr& colorImageMsg) +{ + rs::core::correlated_sample_set sampleSet = m_ros2realsense->CreateSdkSampleSet(colorImageMsg, depthImageMsg); + seq++; + { + //prevent race condition on asynchronous requests(change configuration/recognize) and processing + std::lock_guard guard(mProcessingMutex); + if (ptModule->process_sample_set(sampleSet) != rs::core::status_no_error) + { + NODELET_ERROR("error : failed to process sample"); + return; + } + mPublisher->publishOutput(*ptModule->QueryConfiguration(), *ptModule->QueryOutput()); + if (m_isTestMode) + { + //publish test output(includes color image) only at test mode + mPublisher->publishTestOutput(*ptModule->QueryConfiguration(), *ptModule->QueryOutput(), 
colorImageMsg); + } + } + m_ros2realsense->ReleaseSampleSet(sampleSet); +} +} diff --git a/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.h b/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.h new file mode 100644 index 0000000..5b528a5 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/realsense_ros_person_nodelet.h @@ -0,0 +1,79 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include "ros/ros.h" +#include "ros/package.h" +#include "ros/time.h" +#include +#include +#include +#include +#include +#include +#include + +#include "opencv2/imgproc/types_c.h" +#include "opencv2/opencv.hpp" +#include +#include "rs_sdk.h" +#include "person_tracking_video_module_factory.h" +#include "PersonTrackingHelper.h" + +#include "PersonTrackingServer.h" +#include "PersonTrackingDefaultPublisher.h" +#include "Ros2RealsenseSdkConverter.h" + +namespace realsense_ros_person +{ +class TNodeletPt: public nodelet::Nodelet +{ +public: + TNodeletPt(); + ros::NodeHandle nh; + void onInit()override; +private: + std::unique_ptr ptModule; + + + std::shared_ptr> mColorCameraInfoSubscriber; + std::shared_ptr> mDepthCameraInfoSubscriber; + std::shared_ptr> mTimeSynchronizer_info; + + std::shared_ptr> mDepthSubscriber; + std::shared_ptr> mColorSubscriber; + std::shared_ptr> mTimeSynchronizer; + + std::mutex mProcessingMutex; + std::unique_ptr mPublisher; + PersonTrackingServer mServer; + uint64_t seq; + + //set ros params + void setParams(PersonTrackingConfig& config); + + void personTrackingCallback(const sensor_msgs::ImageConstPtr& depthImageMsg, const sensor_msgs::ImageConstPtr& colorImageMsg); + void subscribeFrameMessages(); + void cameraInfoCallback(const sensor_msgs::CameraInfoConstPtr& colorCameraInfo, const 
sensor_msgs::CameraInfoConstPtr& depthCameraInfo); + PersonTrackingConfig config; + + static const std::string DEFAULT_PUBLISHER; + std::unique_ptr m_ros2realsense; + bool m_isTestMode; +}; +} + diff --git a/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.cpp b/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.cpp new file mode 100644 index 0000000..b6b26e8 --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.cpp @@ -0,0 +1,255 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "PersonTrackingServer.h" + +#include "PersonTracking2RosHelper.h" + +namespace realsense_ros_person +{ +PersonTrackingServer::PersonTrackingServer() +{} + +void PersonTrackingServer::onInit(ros::NodeHandle &nodeHandle, + rs::person_tracking::person_tracking_video_module_interface *personTracking, + PersonTrackingConfig &config) +{ + mPersonTracking = personTracking; + mConfig = config; + mTrackingConfigService = nodeHandle.advertiseService("person_tracking/tracking_config", + &PersonTrackingServer::trackingConfigRequestCallback, + this); + mRecognitionRequestService = nodeHandle.advertiseService("person_tracking/recognition_request", + &PersonTrackingServer::recognitionRequestCallback, + this); + mRecognitionRegisterRequestService = nodeHandle.advertiseService("person_tracking/register_request", + &PersonTrackingServer::recognitionRegisterRequestCallback, + this); + mStartTrackingService = nodeHandle.advertiseService("person_tracking/start_tracking_request", + &PersonTrackingServer::startTrackingRequestCallback, this); + mStopTrackingService = nodeHandle.advertiseService("person_tracking/stop_tracking_request", + &PersonTrackingServer::stopTrackingRequestCallback, this); + mSaveRecognitionDbService = nodeHandle.advertiseService("person_tracking/save_recognition", + &PersonTrackingServer::saveRecognitionDbCallback, this); + 
mLoadRecognitionDbService = nodeHandle.advertiseService("person_tracking/load_recognition", + &PersonTrackingServer::loadRecognitionDbCallback, this); +} + +bool PersonTrackingServer::trackingConfigRequestCallback(realsense_ros_person::TrackingConfig::Request &request, + realsense_ros_person::TrackingConfig::Response &response) +{ + mConfig.recognitionEnabled = request.enableRecognition; + mConfig.pointingGestureEnabled = request.enablePointingGesture; + mConfig.waveGestureEnabled = request.enableWaveGesture; + mConfig.skeletonEnabled = request.enableSkeleton; + mConfig.headBoundingBoxEnabled = request.enableHeadBoundingBox; + mConfig.landmarksEnabled = request.enableLandmarks; + mConfig.headPoseEnabled = request.enableHeadPose; + mConfig.trackingEnabled = true; + ConfigurePersonTracking(mConfig, mPersonTracking->QueryConfiguration()); + response.status = true; + return true; +} + +bool PersonTrackingServer::recognitionRequestCallback(realsense_ros_person::Recognition::Request &request, + realsense_ros_person::Recognition::Response &response) +{ + if (!mPersonTracking->QueryConfiguration()->QueryRecognition()->IsEnabled()) + { + ROS_ERROR("Recognition is not enabled"); + response.status = realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED; + return true; + } + + ROS_INFO_STREAM("Received recognition request for person: " << request.personId); + PXCPersonTrackingData *trackingData = mPersonTracking->QueryOutput(); + PXCPersonTrackingData::Person *personData = trackingData->QueryPersonDataById(request.personId); + if (!personData) + { + ROS_ERROR_STREAM("Couldn't find recognition request target"); + response.status = realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED; + return true; + } + ROS_INFO_STREAM("Found recognition request target"); + PXCPersonTrackingData::PersonRecognition *recognition = personData->QueryRecognition(); + ROS_INFO("User recognition"); + PXCPersonTrackingData::PersonRecognition::RecognizerData result; + auto status = 
recognition->RecognizeUser(&result); + response.status = m_pt2rosHelper.RecognitionStatus2RosRecognitionStatus(status); + response.recognitionId = -1; + response.similarityScore = 0; + switch (status) + { + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionPassedPersonRecognized: + ROS_INFO_STREAM("Recognized person: " << result.recognitionId << " TrackingId: " << result.trackingId); + response.recognitionId = result.recognitionId; + response.similarityScore = result.similarityScore; + + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionPassedPersonNotRecognized: + ROS_INFO_STREAM("Recognition passed. person not recognized"); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailed: + ROS_INFO_STREAM("Recognition failed."); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceNotDetected: + ROS_INFO_STREAM("Recognition failed. Face not detected"); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceNotClear: + ROS_INFO_STREAM("Recognition failed. Face not clear"); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedPersonTooFar: + ROS_INFO_STREAM("Recognition failed. Person too far"); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedPersonTooClose: + ROS_INFO_STREAM("Recognition failed. Person too close"); + break; + case PXCPersonTrackingData::PersonRecognition::RecognitionStatus::RecognitionFailedFaceAmbiguity: + ROS_INFO_STREAM("Recognition failed. 
Face ambiguity"); + break; + } + + return true; +} + +bool +PersonTrackingServer::recognitionRegisterRequestCallback(realsense_ros_person::RecognitionRegister::Request &request, + realsense_ros_person::RecognitionRegister::Response &response) +{ + if (!mPersonTracking->QueryConfiguration()->QueryRecognition()->IsEnabled()) + { + ROS_INFO("Recognition is not enabled"); + response.status = realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED; + return true; + } + + ROS_INFO_STREAM("Received register request for person: " << request.personId); + PXCPersonTrackingData *trackingData = mPersonTracking->QueryOutput(); + PXCPersonTrackingData::Person *personData = trackingData->QueryPersonDataById(request.personId); + if (!personData) + { + ROS_ERROR_STREAM("Couldn't find recognition request target"); + response.status = realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED; + return true; + } + ROS_INFO_STREAM("Found register request target"); + PXCPersonTrackingData::PersonRecognition *recognition = personData->QueryRecognition(); + int32_t outputRecognitionId; + int32_t outputTrackingId; + int32_t outDescriptorId; + ROS_INFO("User registration"); + auto status = recognition->RegisterUser(&outputRecognitionId, &outputTrackingId, &outDescriptorId); + response.status = m_pt2rosHelper.RegistrationStatus2RosRecognitionStatus(status); + switch (status) + { + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationSuccessful: + ROS_INFO_STREAM("Registered person: " << outputRecognitionId << " TrackingId: " << outputTrackingId); + response.recognitionId = outputRecognitionId; + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailedAlreadyRegistered: + ROS_INFO_STREAM( + "Person already registered: " << outputRecognitionId << " TrackingId: " << outputTrackingId); + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailed: + ROS_INFO_STREAM( + 
"Registration failed: person: " << outputRecognitionId << " TrackingId: " << outputTrackingId); + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailedFaceNotDetected: + ROS_INFO_STREAM( + "Registration failed: face not detected, person: " << outputRecognitionId << " TrackingId: " + << outputTrackingId); + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailedFaceNotClear: + ROS_INFO_STREAM( + "Registration failed: face not clear, person: " << outputRecognitionId << " TrackingId: " + << outputTrackingId); + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailedPersonTooFar: + ROS_INFO_STREAM( + "Registration failed: person too far, person: " << outputRecognitionId << " TrackingId: " + << outputTrackingId); + break; + case PXCPersonTrackingData::PersonRecognition::RegistrationStatus::RegistrationFailedPersonTooClose: + ROS_INFO_STREAM( + "Registration failed: person to close, person: " << outputRecognitionId << " TrackingId: " + << outputTrackingId); + break; + } + + return true; +} + +bool PersonTrackingServer::startStopTracking(bool isStart, int personId) +{ + PXCPersonTrackingData *trackingData = mPersonTracking->QueryOutput(); + PXCPersonTrackingData::Person *personData = trackingData->QueryPersonDataById(personId); + if (!personData) + { + ROS_ERROR_STREAM("Couldn't find tracking request target"); + return false; + } + ROS_INFO_STREAM("Found tracking request target"); + if (isStart) + { + ROS_INFO_STREAM("start tracking on person: " << personId); + trackingData->StartTracking(personId); + } + else + { + ROS_INFO_STREAM("stop tracking on person: " << personId); + trackingData->StopTracking(personId); + } + return true; +} + +bool PersonTrackingServer::startTrackingRequestCallback(realsense_ros_person::StartTracking::Request &request, + realsense_ros_person::StartTracking::Response &response) +{ + ROS_INFO_STREAM("Received start tracking 
request for person: " << request.personId); + response.status = startStopTracking(true, request.personId); + return true; +} + +bool PersonTrackingServer::stopTrackingRequestCallback(realsense_ros_person::StopTracking::Request &request, + realsense_ros_person::StopTracking::Response &response) +{ + ROS_INFO_STREAM("Received stop tracking request for person: " << request.personId); + response.status = startStopTracking(false, request.personId); + return true; +} + +bool PersonTrackingServer::saveRecognitionDbCallback(realsense_ros_person::SaveRecognitionDB::Request &request, + realsense_ros_person::SaveRecognitionDB::Response &response) +{ + std::string dbPath = request.saveToPath; + + ROS_INFO_STREAM("Save database to: " << dbPath); + try + { + response.status = SaveDatabase(dbPath, mPersonTracking->QueryConfiguration()); + } + catch (const std::runtime_error &error) + { + ROS_ERROR_STREAM("Failed to save database to: " << dbPath); + response.status = false; + } + return true; +} + +bool PersonTrackingServer::loadRecognitionDbCallback(realsense_ros_person::LoadRecognitionDB::Request &request, + realsense_ros_person::LoadRecognitionDB::Response &response) +{ + std::string dbPath = request.loadFromPath; + ROS_INFO_STREAM("Loading database from: " << dbPath); + try + { + response.status = LoadDatabase(dbPath, mPersonTracking->QueryConfiguration());; + } + catch (const std::runtime_error &error) + { + ROS_ERROR_STREAM("Failed to load database from: " << dbPath); + response.status = false; + } + return true; +} +} \ No newline at end of file diff --git a/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.h b/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.h new file mode 100644 index 0000000..0982a3c --- /dev/null +++ b/realsense_ros_person/src/apiwrapper/server/PersonTrackingServer.h @@ -0,0 +1,71 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "opencv2/opencv.hpp" + +#include "person_tracking_video_module_factory.h" +#include "PersonTrackingHelper.h" + +namespace realsense_ros_person +{ +class PersonTrackingServer +{ +public: + typedef Intel::RealSense::PersonTracking::PersonTrackingData PXCPersonTrackingData; + typedef Intel::RealSense::PersonTracking::PersonTrackingConfiguration PXCPersonTrackingConfiguration; + + PersonTrackingServer(); + + void + onInit(ros::NodeHandle &nodeHandle, rs::person_tracking::person_tracking_video_module_interface *personTracking, + PersonTrackingConfig &config); + +private: + bool trackingConfigRequestCallback(realsense_ros_person::TrackingConfig::Request &request, + realsense_ros_person::TrackingConfig::Response &response); + + bool recognitionRequestCallback(realsense_ros_person::RecognitionRequest &request, + realsense_ros_person::Recognition::Response &response); + + bool recognitionRegisterRequestCallback(realsense_ros_person::RecognitionRegister::Request &request, + realsense_ros_person::RecognitionRegister::Response &response); + + bool startTrackingRequestCallback(realsense_ros_person::StartTracking::Request &request, + realsense_ros_person::StartTracking::Response &response); + + bool stopTrackingRequestCallback(realsense_ros_person::StopTracking::Request &request, + realsense_ros_person::StopTracking::Response &response); + + bool startStopTracking(bool isStart, int personId); + + bool saveRecognitionDbCallback(realsense_ros_person::SaveRecognitionDB::Request &request, + realsense_ros_person::SaveRecognitionDB::Response &response); + + bool loadRecognitionDbCallback(realsense_ros_person::LoadRecognitionDB::Request &request, + realsense_ros_person::LoadRecognitionDB::Response &response); + + + ros::ServiceServer mTrackingConfigService; + ros::ServiceServer mRecognitionRequestService; + ros::ServiceServer 
mRecognitionRegisterRequestService; + ros::ServiceServer mStartTrackingService; + ros::ServiceServer mStopTrackingService; + ros::ServiceServer mSaveRecognitionDbService; + ros::ServiceServer mLoadRecognitionDbService; + ros::ServiceServer mRecognitionImgRequestService; + + realsense_ros_person::PersonTracking2RosHelper m_pt2rosHelper; + rs::person_tracking::person_tracking_video_module_interface *mPersonTracking; + PersonTrackingConfig mConfig; +}; +} \ No newline at end of file diff --git a/realsense_ros_person/src/sample/PersonTrackingSample.cpp b/realsense_ros_person/src/sample/PersonTrackingSample.cpp new file mode 100644 index 0000000..1a4101a --- /dev/null +++ b/realsense_ros_person/src/sample/PersonTrackingSample.cpp @@ -0,0 +1,346 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "PersonTrackingSample.h" + +#include "realsense_ros_person/TrackingConfig.h" +#include "realsense_ros_person/Recognition.h" +#include "realsense_ros_person/StartTracking.h" +#include "realsense_ros_person/StopTracking.h" +#include "realsense_ros_person/RecognitionRegister.h" +#include +#include + +#include +#include + +std::string PersonTrackingSample::PERSON_MODULE_STATE_TOPIC = "/person_tracking/module_state"; +std::string RegistrationResultToString(int status); +std::string RecognitionResultToString(int status); +std::string WaveGestureToString(int32_t waveGestureRos); + + +PersonTrackingSample::PersonTrackingSample() : m_viewer(false), m_trackingRenderer(m_viewer) +{ + m_trackingRenderer.SetPersonSelectionHandler([this](PersonData & data, TrackingRenderer::SelectType type) + { + PersonSelectedHandler(data, type); + }); + m_trackingRenderer.SetGlobalHandler([this](TrackingRenderer::SelectType type) + { + GlobalHandler(type); + }); +} + +void PersonTrackingSample::ProcessCommandLineArgs() +{ + ros::NodeHandle nodeHandle("~"); + nodeHandle.param("skeletonEnabled", mEnableSkeleton, false); + 
ROS_INFO_STREAM("mEnableSkeleton = " << mEnableSkeleton); + + nodeHandle.param("recognitionEnabled", mEnableRecognition, false); + ROS_INFO_STREAM("mEnableRecognition = " << mEnableRecognition); + + nodeHandle.param("pointingGestureEnabled", mEnablePointingGesture, false); + ROS_INFO_STREAM("mEnablePointingGesture = " << mEnablePointingGesture); + + nodeHandle.param("waveGestureEnabled", mEnableWaveGesture, false); + ROS_INFO_STREAM("mEnableWaveGesture = " << mEnableWaveGesture); + + nodeHandle.param("landmarksEnabled", mEnableLandmarks, false); + ROS_INFO_STREAM("mEnableLandmarks = " << mEnableLandmarks); + + nodeHandle.param("headBoundingBoxEnabled", mEnableHeadBoundingBox, false); + ROS_INFO_STREAM("headBoundingBox = " << mEnableHeadBoundingBox); + + nodeHandle.param("headPoseEnabled", mEnableHeadPose, false); + ROS_INFO_STREAM("headPose = " << mEnableHeadPose); +} + +void PersonTrackingSample::InitMessaging(ros::NodeHandle& nodeHandle) +{ + EnableTrackingFeatures(nodeHandle); + mTrackingOutputSubscriber = nodeHandle.subscribe("person_tracking_output_test", 1, &PersonTrackingSample::PersonTrackingCallback, this); + mRecognitionRequestClient = nodeHandle.serviceClient("person_tracking/recognition_request"); + mRegisterRequestClient = nodeHandle.serviceClient("person_tracking/register_request"); + mStartTrackingRequestClient = nodeHandle.serviceClient("person_tracking/start_tracking_request"); + mStopTrackingRequestClient = nodeHandle.serviceClient("person_tracking/stop_tracking_request"); +} + +void PersonTrackingSample::EnableTrackingFeatures(ros::NodeHandle& nodeHandle) +{ + mConfigClient = nodeHandle.serviceClient("person_tracking/tracking_config"); + + realsense_ros_person::TrackingConfig config; + config.request.enableRecognition = mEnableRecognition; + config.request.enableSkeleton = mEnableSkeleton; + config.request.enablePointingGesture = mEnablePointingGesture; + config.request.enableWaveGesture = mEnableWaveGesture; + config.request.enableLandmarks = 
mEnableLandmarks; + config.request.enableHeadBoundingBox = mEnableHeadBoundingBox; + config.request.enableHeadPose = mEnableHeadPose; + + while (!mConfigClient.call(config)) + { + ROS_INFO_STREAM("failed to send config to person tracking node, sleep 1 second and retry"); + usleep(1e6); + } +} + +void PersonTrackingSample::PersonTrackingCallback(const realsense_ros_person::FrameTest& msg) +{ +// ROS_INFO_STREAM("Received person tracking output message. Number of people: " << msg.frameData.numberOfUsers); + + cv_bridge::CvImageConstPtr ptr; + cv::Mat colorImage = cv_bridge::toCvShare(msg.colorImage, ptr, sensor_msgs::image_encodings::BGR8)->image; + + DrawDetectionResults(colorImage, msg); + + m_viewer.ShowImage(colorImage); +} + +void PersonTrackingSample::DrawDetectionResults(cv::Mat& colorImage, const realsense_ros_person::FrameTest& msg) +{ + std::lock_guard guard(mMutex); + + m_trackingRenderer.Reset(); + + realsense_ros_person::Frame frame = msg.frameData; + + for (realsense_ros_person::User& user : frame.usersData) + { + DrawPersonResults(colorImage, user); + } +} + + +void PersonTrackingSample::DrawPersonResults(cv::Mat& colorImage, realsense_ros_person::User& user) +{ + // person rectangle + cv::Point pt1(user.userRect.rectCorners[0].x, user.userRect.rectCorners[0].y); + cv::Point pt2(user.userRect.rectCorners[1].x, user.userRect.rectCorners[1].y); + cv::Rect userRectangle(pt1, pt2); + + int personId = user.userInfo.Id; + + // center of mass point + cv::Point centerMass(user.centerOfMassImage.x, user.centerOfMassImage.y); + cv::Point3f centerMassWorld(user.centerOfMassWorld.x, user.centerOfMassWorld.y, user.centerOfMassWorld.z); + + m_trackingRenderer.DrawPerson(colorImage, personId, userRectangle, centerMass, centerMassWorld); + DrawPersonSkeleton(colorImage, user); + DrawPersonGestures(colorImage, user); + DrawPersonLandmarks(colorImage, user); + DrawFace(colorImage, user); + DrawPersonSummaryReport(colorImage, user); +} + +void 
PersonTrackingSample::DrawPersonSkeleton(cv::Mat& colorImage, realsense_ros_person::User& user) +{ + if (!mEnableSkeleton) return; + std::vector points; + for (realsense_ros_person::SkeletonJoint joint : user.skeletonJoints) + { + if (joint.confidence > JOINT_CONFIDENCE_THR) + { + std::string x = std::to_string(joint.location.x); + std::string y = std::to_string(joint.location.y); + + cv::Point location(joint.location.x, joint.location.y); + points.push_back(location); + } + } + m_trackingRenderer.DrawSkeleton(colorImage, points); +} + +void PersonTrackingSample::DrawPersonLandmarks(cv::Mat& colorImage, realsense_ros_person::User& user) +{ + if (!mEnableLandmarks) return; + if (user.landmarksInfo.confidence < LANDMARKS_CONFIDENCE_THR) return; + + std::vector points; + for (realsense_ros_person::Landmark landmark : user.landmarksInfo.landmarks) + { + cv::Point location(landmark.location.x, landmark.location.y); + points.push_back(location); + } + m_trackingRenderer.DrawLandmarks(colorImage, points); +} + +void PersonTrackingSample::DrawFace(cv::Mat& colorImage, realsense_ros_person::User& user) +{ + if (!mEnableHeadBoundingBox) return; + if (user.headBoundingBox.confidence > HEAD_BOUNDING_BOX_THR) + { + cv::Point pt1(user.headBoundingBox.rectCorners[0].x, user.headBoundingBox.rectCorners[0].y); + cv::Point pt2(user.headBoundingBox.rectCorners[1].x, user.headBoundingBox.rectCorners[1].y); + cv::Rect headBoundingBoxRect(pt1, pt2); + m_trackingRenderer.DrawFace(colorImage, headBoundingBoxRect); + } +} + + +void PersonTrackingSample::DrawPersonGestures(cv::Mat& colorImage, realsense_ros_person::User& user) +{ + if (!mEnablePointingGesture) return; + + if (user.gestures.pointing.confidence > 0) + { + realsense_ros_person::Pointing pointing = user.gestures.pointing; + cv::Point origin(pointing.originColor.x, pointing.originColor.y); + cv::Point2f direction(pointing.orientationColor.x, pointing.orientationColor.y); + + m_trackingRenderer.DrawPointing(colorImage, origin, 
direction); + } +} + +void PersonTrackingSample::PersonSelectedHandler(PersonData& data, TrackingRenderer::SelectType type) +{ + std::lock_guard guard(mMutex); + + if (type == TrackingRenderer::SelectType::RECOGNITION) + { + realsense_ros_person::Recognition request; + request.request.personId = data.Id; + mRecognitionRequestClient.call(request); + data.rid = request.response.recognitionId; + ROS_INFO_STREAM("Recognition Status = " + RecognitionResultToString(request.response.status)); + } + else if (type == TrackingRenderer::SelectType::REGISTRATION) + { + realsense_ros_person::RecognitionRegister request; + request.request.personId = data.Id; + mRegisterRequestClient.call(request); + data.rid = request.response.recognitionId; + ROS_INFO_STREAM("Registration Status = " + RegistrationResultToString(request.response.status)); + + } + else if (type == TrackingRenderer::SelectType::TRACKING) + { + auto personState = ros::topic::waitForMessage(PERSON_MODULE_STATE_TOPIC, ros::Duration(5)); + if (personState == nullptr) + { + ROS_ERROR_STREAM("Failed to get person tracking state"); + return; + } + if (personState->trackingState == realsense_ros_person::PersonModuleState::TRACKING_STATE_DETECTING) + { + realsense_ros_person::StartTracking request; + request.request.personId = data.Id; + mStartTrackingRequestClient.call(request); + std::string res = request.response.status ? " SUCCEEDED" : " FAILED"; + ROS_INFO_STREAM("Start tracking of user ID " + std::to_string(data.Id) + res); + } + else + { + realsense_ros_person::StopTracking request; + request.request.personId = data.Id; + mStopTrackingRequestClient.call(request); + std::string res = request.response.status ? 
" SUCCEEDED" : " FAILED"; + ROS_INFO_STREAM("Stop tracking of user ID " + std::to_string(data.Id) + res); + } + } +} + +//Function For Handling glbal events (that are not specific for a user) +void PersonTrackingSample::GlobalHandler(TrackingRenderer::SelectType type) +{ +// std::lock_guard guard(mMutex); +} + + +void PersonTrackingSample::DrawPersonSummaryReport(cv::Mat image, realsense_ros_person::User &user) +{ + std::stringstream summaryText;// summary text at at top left corner of image (center of mass, orientation etc.) + + //add center of mass (world coordinates) + summaryText << user.userInfo.Id << ": " << + std::fixed << std::setprecision(3) << + "(" << user.centerOfMassWorld.x << "," << user.centerOfMassWorld.y << "," << user.centerOfMassWorld.z << ")"; + + if (user.headPose.confidence > 0) + { + //add head pose + summaryText << std::setprecision(1) << std::fixed << " head orientation " << "(" << + user.headPose.angles.pitch << ", " << + user.headPose.angles.roll << ", " << + user.headPose.angles.yaw << ")"; + } + + //add wave gesture + int32_t waveGesture = user.gestures.wave.type; + if (waveGesture != (int32_t)realsense_ros_person::Wave::WAVE_NOT_DETECTED) + { + summaryText << " wave gesture: " << WaveGestureToString(waveGesture).c_str() << "\n"; + } + + m_trackingRenderer.DrawLineAtSummaryReport(image, summaryText.str()); +} + +std::string RegistrationResultToString(int status) +{ + switch (status) + { + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_SUCCESSFULL: + return "REGISTRATION_SUCCESSFULL"; + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED: + return "REGISTRATION_FAILED"; + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_ALREADY_REGISTERED: + return "REGISTRATION_FAILED_ALREADY_REGISTERED"; + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_FACE_NOT_DETECTED: + return "REGISTRATION_FAILED_FACE_NOT_DETECTED"; + case 
realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_FACE_NOT_CLEAR: + return "REGISTRATION_FAILED_FACE_NOT_CLEAR"; + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_PERSON_TO_FAR: + return "REGISTRATION_FAILED_PERSON_TO_FAR"; + case realsense_ros_person::RecognitionRegisterResponse::REGISTRATION_FAILED_PERSON_TO_CLOSE: + return "REGISTRATION_FAILED_PERSON_TO_CLOSE"; + default: + return "REGISTRATION_UNKNOWN_ERROR"; + } +} + + +std::string RecognitionResultToString(int status) +{ + switch (status) + { + case realsense_ros_person::RecognitionResponse::RECOGNITION_PASSED_PERSON_RECOGNIZED: + return "RECOGNITION_PASSED_PERSON_RECOGNIZED"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_PASSED_PERSON_NOT_RECOGNIZED: + return "RECOGNITION_PASSED_PERSON_NOT_RECOGNIZED"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED: + return "RECOGNITION_FAILED"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_NOT_DETECTED: + return "RECOGNITION_FAILED_FACE_NOT_DETECTED"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_NOT_CLEAR: + return "RECOGNITION_FAILED_FACE_NOT_CLEAR"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_PERSON_TOO_FAR: + return "RECOGNITION_FAILED_PERSON_TOO_FAR"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_PERSON_TOO_CLOSE: + return "RECOGNITION_FAILED_PERSON_TOO_CLOSE"; + case realsense_ros_person::RecognitionResponse::RECOGNITION_FAILED_FACE_AMBIGUITY: + return "RECOGNITION_FAILED_FACE_AMBIGUITY"; + default: + return "RECOGNITION_UNKNOWN_ERROR"; + } +} + +std::string WaveGestureToString(int32_t waveGestureRos) +{ + switch (waveGestureRos) + { + case realsense_ros_person::Wave::WAVE_NOT_DETECTED: + return "Wave not detected"; + case realsense_ros_person::Wave::WAVE_LEFT_LA: + return "Wave left left area"; + case realsense_ros_person::Wave::WAVE_RIGHT_LA: + return "Wave right left area"; + 
case realsense_ros_person::Wave::WAVE_LEFT_RA: + return "Wave left right area"; + case realsense_ros_person::Wave::WAVE_RIGHT_RA: + return "Wave right right area"; + default: + throw std::runtime_error("unsupported wave gesture value"); + } +} diff --git a/realsense_ros_person/src/sample/PersonTrackingSample.h b/realsense_ros_person/src/sample/PersonTrackingSample.h new file mode 100644 index 0000000..bc883a7 --- /dev/null +++ b/realsense_ros_person/src/sample/PersonTrackingSample.h @@ -0,0 +1,65 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once +#include "ros/ros.h" +#include "realsense_ros_person/FrameTest.h" +#include "TrackingRenderer/TrackingRenderer.h" +#include + +const char WINDOW_NAME[] = "PersonTracking"; + +class PersonTrackingSample +{ +public: + PersonTrackingSample(); + + void ProcessCommandLineArgs(); + + void InitMessaging(ros::NodeHandle& nodeHandle); + +private: + void EnableTrackingFeatures(ros::NodeHandle& nodeHandle); + + void PersonTrackingCallback(const realsense_ros_person::FrameTest& msg); + void DrawDetectionResults(cv::Mat& colorImage, const realsense_ros_person::FrameTest& msg); + void DrawPersonResults(cv::Mat& colorImage, realsense_ros_person::User& user); + + + + void DrawPersonSkeleton(cv::Mat& colorImage, realsense_ros_person::User& user); + void DrawPersonGestures(cv::Mat& colorImage, realsense_ros_person::User& user); + void DrawPersonLandmarks(cv::Mat& colorImage, realsense_ros_person::User& user); + void DrawFace(cv::Mat& colorImage, realsense_ros_person::User& user); + void DrawPersonSummaryReport(cv::Mat image, realsense_ros_person::User &user); + + void PersonSelectedHandler(PersonData& data, TrackingRenderer::SelectType type); + void GlobalHandler(TrackingRenderer::SelectType type); + + + bool mEnableSkeleton; + bool mEnableRecognition; + bool mEnableLandmarks; + bool mEnableHeadBoundingBox; + bool mEnableHeadPose; + bool 
mEnablePointingGesture; + bool mEnableWaveGesture; + + const int JOINT_CONFIDENCE_THR = 90; + const int LANDMARKS_CONFIDENCE_THR = 90; + const int HEAD_BOUNDING_BOX_THR = 90; + + ros::Subscriber mTrackingOutputSubscriber; + ros::ServiceClient mRecognitionRequestClient; + ros::ServiceClient mStartTrackingRequestClient; + ros::ServiceClient mStopTrackingRequestClient; + ros::ServiceClient mRegisterRequestClient; + ros::ServiceClient mConfigClient; + + std::mutex mMutex; + + Viewer m_viewer; + TrackingRenderer m_trackingRenderer; + + static std::string PERSON_MODULE_STATE_TOPIC; +}; diff --git a/realsense_ros_person/src/sample/PersonTrackingSampleRunner.cpp b/realsense_ros_person/src/sample/PersonTrackingSampleRunner.cpp new file mode 100644 index 0000000..3e0e01f --- /dev/null +++ b/realsense_ros_person/src/sample/PersonTrackingSampleRunner.cpp @@ -0,0 +1,20 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "PersonTrackingSample.h" + +int main(int argc, char **argv) +{ + ROS_INFO("Start person tracking sample"); + + ros::init(argc, argv, "realsense_ros_person_sample"); + ros::NodeHandle nodeHandle; + + PersonTrackingSample test; + test.ProcessCommandLineArgs(); + test.InitMessaging(nodeHandle); + + ros::spin(); + + return 0; +} diff --git a/realsense_ros_person/src/sample/TrackingRenderer/Colors.h b/realsense_ros_person/src/sample/TrackingRenderer/Colors.h new file mode 100644 index 0000000..3a7b62b --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/Colors.h @@ -0,0 +1,12 @@ + +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved#pragma once + +#include + +cv::Scalar BLACK(0, 0, 0); +cv::Scalar WHITE(255, 255, 255); +cv::Scalar RED(0, 0, 255); +cv::Scalar BLUE(255, 0, 0); +cv::Scalar GREEN(0, 255, 0); +cv::Scalar YELLOW(0, 255, 255); diff --git a/realsense_ros_person/src/sample/TrackingRenderer/OpencvUtils.h b/realsense_ros_person/src/sample/TrackingRenderer/OpencvUtils.h new file mode 100644 index 0000000..2f2aeec --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/OpencvUtils.h @@ -0,0 +1,61 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once + +#include +#include + +struct Rectangle +{ + Rectangle(cv::Point topLeft, cv::Point bottomRight) : topLeft(topLeft), bottomRight(bottomRight) + { + width = bottomRight.x - topLeft.x; + height = bottomRight.y - topLeft.y; + } + cv::Point topLeft; + cv::Point bottomRight; + int width; + int height; +}; + +/** + Creates a black text on white background + + @param image - add the text on this image. + @param label - the text to add. + @param origin - bottom left corner of the text. 
+ @param thickness - thickness of the text font + @return the rectangle of the text +*/ +Rectangle setLabel(cv::Mat& image, const std::string label, const cv::Point& origin, int thickness = 2, double scale = 0.5) +{ + int fontface = cv::FONT_HERSHEY_SIMPLEX; + int baseline = 0; + + cv::Size text = cv::getTextSize(label, fontface, scale, thickness, &baseline); + + cv::Point topLeft = origin + cv::Point(-3, -3); + cv::Point bottomRight = origin + cv::Point(text.width + 3, text.height + 3); + + cv::rectangle(image, topLeft, bottomRight, WHITE, CV_FILLED); + cv::putText(image, label, origin + cv::Point(0, text.height), fontface, scale, BLACK, thickness, 8); + + return Rectangle(topLeft, bottomRight); +} + +namespace cv +{ +void arrowedLine(cv::Mat& img, Point pt1, Point pt2, const Scalar& color, int thickness = 1, int line_type = 8, int shift = 0, double tipLength = 0.1) +{ + const double tipSize = norm(pt1 - pt2) * tipLength; // Factor to normalize the size of the tip depending on the length of the arrow + line(img, pt1, pt2, color, thickness, line_type, shift); + const double angle = atan2((double) pt1.y - pt2.y, (double) pt1.x - pt2.x); + Point p(cvRound(pt2.x + tipSize * cos(angle + CV_PI / 4)), + cvRound(pt2.y + tipSize * sin(angle + CV_PI / 4))); + line(img, p, pt2, color, thickness, line_type, shift); + p.x = cvRound(pt2.x + tipSize * cos(angle - CV_PI / 4)); + p.y = cvRound(pt2.y + tipSize * sin(angle - CV_PI / 4)); + line(img, p, pt2, color, thickness, line_type, shift); +} +} diff --git a/realsense_ros_person/src/sample/TrackingRenderer/PersonDataStorage.h b/realsense_ros_person/src/sample/TrackingRenderer/PersonDataStorage.h new file mode 100644 index 0000000..c0cd8c0 --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/PersonDataStorage.h @@ -0,0 +1,47 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once +#include +#include + +class PersonData +{ +public: + int Id; + int rid = 0; + cv::Rect rectangle; + bool valid; +}; + +class PersonDataStorage +{ +public: + void Add(PersonData data) + { + mPersonsData[data.Id] = data; + } + + const PersonData* get(int id) + { + auto iter = mPersonsData.find(id); + return (iter != mPersonsData.end()) ? &iter->second : nullptr; + } + + //void clear() { mPersonsData.clear(); } + + PersonData* MatchPersonToPoint(cv::Point point) + { + for (auto iter = mPersonsData.rbegin(); iter != mPersonsData.rend(); ++iter) + { + if (iter->second.rectangle.contains(point)) + { + return &iter->second; + } + } + return nullptr; + } + +private: + std::map mPersonsData; +}; diff --git a/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.cpp b/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.cpp new file mode 100644 index 0000000..0583939 --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.cpp @@ -0,0 +1,118 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include +#include + +#include "Colors.h" +#include "OpencvUtils.h" +#include "TrackingRenderer.h" + +TrackingRenderer::TrackingRenderer(Viewer& viewer) : m_viewer(viewer) +{ + viewer.SetMouseEventHandler([this](int event, int x, int y, int flags) + { + HandleMouseEvent(event, x, y, flags); + }); +} + +void TrackingRenderer::Reset() +{ + mSummaryTextBottom = 0; +} + +void TrackingRenderer::DrawPerson(cv::Mat image, int id, cv::Rect boundingBox, cv::Point com, cv::Point3f comWorld) +{ + cv::Point pt1(boundingBox.x, boundingBox.y); +// cv::Point pt2(boundingBox.x + boundingBox.width, boundingBox.y + boundingBox.height); + + cv::rectangle(image, boundingBox, YELLOW, 2); + + // person ID at top left corner of person rectangle + int personId = id; + std::string idText = "ID: " + std::to_string(personId); + Rectangle personIdTextRect = setLabel(image, idText, pt1 + cv::Point(5, 5), 1, 0.4); + + // recognition ID under person ID + auto personData = m_personDataStorage.get(id); + int rid = (personData != nullptr && personData->rid > 0) ? personData->rid : -1; + { + std::string recognitionIdText = "RID: " + ((rid > 0) ? 
std::to_string(rid) : std::string("?")); + setLabel(image, recognitionIdText, pt1 + cv::Point(5, 5 + personIdTextRect.height), 1, 0.4); + } + + // center of mass point + cv::Point centerMass = com; + cv::circle(image, centerMass, 2, RED, -1, 8, 0); + + // Add person data to storage + PersonData data; + data.Id = personId; + data.rid = rid; + data.rectangle = boundingBox; + m_personDataStorage.Add(data); +} + +void TrackingRenderer::DrawPoints(cv::Mat image, std::vector& points, cv::Scalar color) +{ + for (auto point : points) + { + cv::circle(image, point, 3, GREEN, -1, 8, 0); + } +} +void TrackingRenderer::DrawSkeleton(cv::Mat image, std::vector& points) +{ + DrawPoints(image, points, GREEN); +} + + +void TrackingRenderer::DrawLandmarks(cv::Mat image, std::vector& points) +{ + DrawPoints(image, points, GREEN); +} + + + +void TrackingRenderer::DrawPointing(cv::Mat image, cv::Point origin, cv::Point2f direction) +{ + cv::circle(image, origin, 3, BLUE, -1, 8, 0); + cv::Point vector(origin.x + 400 * direction.x, origin.y + 400 * direction.y); + cv::arrowedLine(image, origin, vector, GREEN, 2, 8, 0, 0.15); +} + +void TrackingRenderer::DrawFace(cv::Mat image, cv::Rect boundingBox) +{ + cv::Point pt1(boundingBox.x, boundingBox.y); + cv::Point pt2(boundingBox.x + boundingBox.width, boundingBox.y + boundingBox.height); + cv::rectangle(image, boundingBox, RED, 2); +} + + +void TrackingRenderer::HandleMouseEvent(int event, int x, int y, int flags) +{ + if (!m_personSelectedHandler) return; + PersonData *data = m_personDataStorage.MatchPersonToPoint(cv::Point(x, y)); + if (data == nullptr) return; + + if (event == cv::EVENT_LBUTTONDOWN) + { + if ((flags & cv::EVENT_FLAG_CTRLKEY)) + { + m_personSelectedHandler(*data, SelectType::RECOGNITION); + } + else + { + m_personSelectedHandler(*data, SelectType::REGISTRATION); + } + } + else if (event == cv::EVENT_MBUTTONDOWN) + { + m_personSelectedHandler(*data, SelectType::TRACKING); + } +} + +void 
TrackingRenderer::DrawLineAtSummaryReport(cv::Mat image, std::string line) +{ + Rectangle summaryTextRect = setLabel(image, line, cv::Point(0, mSummaryTextBottom), 2, 0.5); + mSummaryTextBottom = summaryTextRect.bottomRight.y + 0.02 * image.rows; +} \ No newline at end of file diff --git a/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.h b/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.h new file mode 100644 index 0000000..d42cde3 --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/TrackingRenderer.h @@ -0,0 +1,53 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once +#include "Viewer.h" +#include "PersonDataStorage.h" +#include +#include +#include +#include + +class TrackingRenderer +{ +public: + enum SelectType + { + ACTIVATE_RECOGNITION, + RECOGNITION, + REGISTRATION, + TRACKING + }; + + TrackingRenderer(Viewer& viewer); + + void SetPersonSelectionHandler(std::function handler) + { + m_personSelectedHandler = handler; + } + void SetGlobalHandler(std::function handler) + { + m_globaldHandler = handler; + } + + void Reset(); + void DrawPoints(cv::Mat image, std::vector& points, cv::Scalar color); + void DrawPerson(cv::Mat image, int id, cv::Rect boundingBox, cv::Point com, cv::Point3f comWorld); + void DrawFace(cv::Mat image, cv::Rect boundingBox); + void DrawSkeleton(cv::Mat image, std::vector& points); + void DrawLandmarks(cv::Mat image, std::vector& points); + void DrawPointing(cv::Mat image, cv::Point origin, cv::Point2f direction); + void DrawLineAtSummaryReport(cv::Mat image, std::string line); + +private: + void HandleMouseEvent(int event, int x, int y, int flags); + + int mSummaryTextBottom = 0; + Viewer& m_viewer; + + std::function m_personSelectedHandler; + std::function m_globaldHandler; + + PersonDataStorage m_personDataStorage; +}; diff --git a/realsense_ros_person/src/sample/TrackingRenderer/Viewer.cpp 
b/realsense_ros_person/src/sample/TrackingRenderer/Viewer.cpp new file mode 100644 index 0000000..147054f --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/Viewer.cpp @@ -0,0 +1,48 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "Viewer.h" +#include + +const char COLOR_WINDOW_NAME[] = "Person Tracking Sample"; +const char DEPTH_WINDOW_NAME[] = "Person Tracking Sample Depth"; + +void mouseCallBack(int event, int x, int y, int flags, void* userdata) +{ + Viewer* viewer = (Viewer*)userdata; + viewer->MouseEventCallback(event, x, y, flags); +} + +Viewer::Viewer(bool showDepth) : m_showDepth(showDepth) +{ + cv::namedWindow(COLOR_WINDOW_NAME, CV_WINDOW_AUTOSIZE); + cv::setMouseCallback(COLOR_WINDOW_NAME, mouseCallBack, this); + cv::startWindowThread(); + + if (showDepth) + { + cv::namedWindow(DEPTH_WINDOW_NAME, CV_WINDOW_AUTOSIZE); + cv::startWindowThread(); + } +} + +void Viewer::ShowImage(cv::Mat image) +{ + cv::imshow(COLOR_WINDOW_NAME, image); + cv::waitKey(25); +} + +void Viewer::ShowDepth(cv::Mat depth) +{ + cv::imshow(DEPTH_WINDOW_NAME, depth); +} + +void Viewer::SetMouseEventHandler(std::function mouseEventHandler) \ +{ + m_mouseEventHandler = mouseEventHandler; +} + +void Viewer::MouseEventCallback(int event, int x, int y, int flags) +{ + if (m_mouseEventHandler) m_mouseEventHandler(event, x, y, flags); +} diff --git a/realsense_ros_person/src/sample/TrackingRenderer/Viewer.h b/realsense_ros_person/src/sample/TrackingRenderer/Viewer.h new file mode 100644 index 0000000..5160548 --- /dev/null +++ b/realsense_ros_person/src/sample/TrackingRenderer/Viewer.h @@ -0,0 +1,24 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once +#include +#include + +class Viewer +{ +public: + Viewer(bool showDepth); + + void ShowImage(cv::Mat image); + void ShowDepth(cv::Mat depth); + + void SetMouseEventHandler(std::function mouseEventHandler); + + void MouseEventCallback(int event, int x, int y, int flags); + +private: + bool m_showDepth; + + std::function m_mouseEventHandler; +}; diff --git a/realsense_ros_person/srv/LoadRecognitionDB.srv b/realsense_ros_person/srv/LoadRecognitionDB.srv new file mode 100644 index 0000000..22fe155 --- /dev/null +++ b/realsense_ros_person/srv/LoadRecognitionDB.srv @@ -0,0 +1,3 @@ +string loadFromPath +--- +bool status diff --git a/realsense_ros_person/srv/Recognition.srv b/realsense_ros_person/srv/Recognition.srv new file mode 100644 index 0000000..f841f7c --- /dev/null +++ b/realsense_ros_person/srv/Recognition.srv @@ -0,0 +1,15 @@ +int32 personId +--- +int32 status +int32 recognitionId +float32 similarityScore + +#registration status constansts +int32 RECOGNITION_PASSED_PERSON_RECOGNIZED=0 +int32 RECOGNITION_PASSED_PERSON_NOT_RECOGNIZED=1 +int32 RECOGNITION_FAILED=2 +int32 RECOGNITION_FAILED_FACE_NOT_DETECTED=3 +int32 RECOGNITION_FAILED_FACE_NOT_CLEAR=4 +int32 RECOGNITION_FAILED_PERSON_TOO_FAR=5 +int32 RECOGNITION_FAILED_PERSON_TOO_CLOSE=6 +int32 RECOGNITION_FAILED_FACE_AMBIGUITY=7 diff --git a/realsense_ros_person/srv/RecognitionRegister.srv b/realsense_ros_person/srv/RecognitionRegister.srv new file mode 100644 index 0000000..650a1cd --- /dev/null +++ b/realsense_ros_person/srv/RecognitionRegister.srv @@ -0,0 +1,13 @@ +int32 personId +--- +int32 status +int32 recognitionId + +#recognition status constansts +int32 REGISTRATION_SUCCESSFULL=0 +int32 REGISTRATION_FAILED=1 +int32 REGISTRATION_FAILED_ALREADY_REGISTERED=2 +int32 REGISTRATION_FAILED_FACE_NOT_DETECTED=3 +int32 REGISTRATION_FAILED_FACE_NOT_CLEAR=4 +int32 REGISTRATION_FAILED_PERSON_TO_FAR=5 +int32 REGISTRATION_FAILED_PERSON_TO_CLOSE=6 diff --git 
a/realsense_ros_person/srv/SaveRecognitionDB.srv b/realsense_ros_person/srv/SaveRecognitionDB.srv new file mode 100644 index 0000000..b5e5be4 --- /dev/null +++ b/realsense_ros_person/srv/SaveRecognitionDB.srv @@ -0,0 +1,3 @@ +string saveToPath +--- +bool status diff --git a/realsense_ros_person/srv/StartTracking.srv b/realsense_ros_person/srv/StartTracking.srv new file mode 100644 index 0000000..6fd4cbc --- /dev/null +++ b/realsense_ros_person/srv/StartTracking.srv @@ -0,0 +1,3 @@ +int32 personId +--- +bool status diff --git a/realsense_ros_person/srv/StopTracking.srv b/realsense_ros_person/srv/StopTracking.srv new file mode 100644 index 0000000..6fd4cbc --- /dev/null +++ b/realsense_ros_person/srv/StopTracking.srv @@ -0,0 +1,3 @@ +int32 personId +--- +bool status diff --git a/realsense_ros_person/srv/TrackingConfig.srv b/realsense_ros_person/srv/TrackingConfig.srv new file mode 100644 index 0000000..c4248cc --- /dev/null +++ b/realsense_ros_person/srv/TrackingConfig.srv @@ -0,0 +1,9 @@ +bool enableRecognition +bool enableSkeleton +bool enablePointingGesture +bool enableWaveGesture +bool enableLandmarks +bool enableHeadBoundingBox +bool enableHeadPose +--- +bool status diff --git a/realsense_ros_person/test/person_detection.test b/realsense_ros_person/test/person_detection.test new file mode 100755 index 0000000..d01681e --- /dev/null +++ b/realsense_ros_person/test/person_detection.test @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/test/person_test.cpp b/realsense_ros_person/test/person_test.cpp new file mode 100755 index 0000000..4ef9aa1 --- /dev/null +++ b/realsense_ros_person/test/person_test.cpp @@ -0,0 +1,157 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#include +#include +#include +#include + +#include "realsense_ros_person/Frame.h" +#include "realsense_ros_person/Recognition.h" +#include "realsense_ros_person/RecognitionRegister.h" +#include "realsense_ros_person/PersonModuleState.h" +#include "realsense_ros_person/StartTracking.h" + + +#include //TODO remove this include + +std::string DETECTION_TOPIC = "/person_tracking_output"; +std::string REGISTER_SERVICE = "/person_tracking/register_request"; +std::string RECOGNIZE_SERVICE = "/person_tracking/recognition_request"; +std::string START_TRACKING_SERVICE = "/person_tracking/start_tracking_request"; + +bool gDetectionRecv = false; +bool gDetectionBBoxRecv = false; +bool gDetectionComRecv = false; +bool gWaveRecv = false; + +realsense_ros_person::User gUsers; +ros::ServiceClient gRegisterClient; +ros::ServiceClient gRecognizeClient; +ros::ServiceClient gStartTrackingClient; + +realsense_ros_person::Frame g_latestFrameData; +std::mutex g_latestFrameDataMutex; + +void detectionCallback(const realsense_ros_person::Frame& frame) +{ + gDetectionRecv = true; + { + std::lock_guard lockGuard(g_latestFrameDataMutex); + g_latestFrameData = frame; + } + + for (realsense_ros_person::User person: frame.usersData) + { + if ((person.userRect.rectCorners[0].x != 0) || (person.userRect.rectCorners[0].y != 0) || + (person.userRect.rectCorners[1].x != 0) || (person.userRect.rectCorners[1].y != 0)) + { + gDetectionBBoxRecv = true; + } + else + { + gDetectionBBoxRecv = false; + } + + if ((person.centerOfMassWorld.x != 0) || (person.centerOfMassWorld.y != 0) || (person.centerOfMassWorld.z != 0) || + (person.centerOfMassImage.x != 0) || (person.centerOfMassImage.y != 0)) + { + gDetectionComRecv = true; + } + else + { + gDetectionComRecv = false; + } + + //if received minimum once while test - test passed + if (!gWaveRecv && person.gestures.wave.type != realsense_ros_person::Wave::WAVE_NOT_DETECTED) + { + gWaveRecv = true; + } + + if ((gDetectionBBoxRecv == false) 
|| (gDetectionComRecv == false)) + { + break; + } + } +} + +TEST(PersonTests, PersonDetection) +{ + EXPECT_TRUE(gDetectionRecv); + EXPECT_TRUE(gDetectionBBoxRecv); + EXPECT_TRUE(gDetectionComRecv); +} + + +TEST(PersonTests, WaveDetection) +{ + EXPECT_TRUE(gDetectionRecv); + EXPECT_TRUE(gDetectionBBoxRecv); + EXPECT_TRUE(gDetectionComRecv); + EXPECT_TRUE(gWaveRecv); +} + +TEST(PersonTests, Recognition) +{ + //register person + realsense_ros_person::RecognitionRegister registerRequest; + { + std::lock_guard lockGuard(g_latestFrameDataMutex); + EXPECT_TRUE(g_latestFrameData.numberOfUsers > 0); + registerRequest.request.personId = g_latestFrameData.usersData[0].userInfo.Id; + } + gRegisterClient.call(registerRequest); + + + //recognize person + realsense_ros_person::Recognition recognitionRequest; + { + std::lock_guard lockGuard(g_latestFrameDataMutex); + EXPECT_TRUE(g_latestFrameData.numberOfUsers > 0); + recognitionRequest.request.personId = g_latestFrameData.usersData[0].userInfo.Id; + } + gRecognizeClient.call(recognitionRequest); + + ASSERT_EQ(recognitionRequest.response.recognitionId, registerRequest.response.recognitionId); +} + +TEST(PersonTests, Tracking) +{ + int personTrackingId = -1; + { + std::lock_guard lockGuard(g_latestFrameDataMutex); + EXPECT_TRUE(g_latestFrameData.numberOfUsers > 0); + personTrackingId = g_latestFrameData.usersData[0].userInfo.Id; + } + + realsense_ros_person::StartTracking startTrackingRequest; + startTrackingRequest.request.personId = personTrackingId; + gStartTrackingClient.call(startTrackingRequest); + + EXPECT_TRUE(startTrackingRequest.response.status); +} + +int main(int argc, char **argv) try +{ + testing::InitGoogleTest(&argc, argv); + ros::init(argc, argv, "test_topics"); + + ros::NodeHandle nh; + ROS_INFO_STREAM("RealSense person test - Initializing Tests..."); + + ros::Subscriber detection_sub = nh.subscribe(DETECTION_TOPIC, 1, detectionCallback); + gRegisterClient = nh.serviceClient(REGISTER_SERVICE); + gRecognizeClient = 
nh.serviceClient(RECOGNIZE_SERVICE); + gStartTrackingClient = nh.serviceClient(START_TRACKING_SERVICE); + + ros::Rate r(10); + for (int i = 0; i< 100; ++i) + { + ros::spinOnce(); + r.sleep(); + } + + return RUN_ALL_TESTS(); +} +catch(...) {} // catch the "testing::internal::::ClassUniqueToAlwaysTrue" from gtest diff --git a/realsense_ros_person/test/person_tracking.test b/realsense_ros_person/test/person_tracking.test new file mode 100755 index 0000000..1c71cbc --- /dev/null +++ b/realsense_ros_person/test/person_tracking.test @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/test/recognition.test b/realsense_ros_person/test/recognition.test new file mode 100755 index 0000000..6dcb6f2 --- /dev/null +++ b/realsense_ros_person/test/recognition.test @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_person/test/wave_detection.test b/realsense_ros_person/test/wave_detection.test new file mode 100755 index 0000000..d5ed510 --- /dev/null +++ b/realsense_ros_person/test/wave_detection.test @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_slam/CHANGELOG.rst b/realsense_ros_slam/CHANGELOG.rst new file mode 100644 index 0000000..714585b --- /dev/null +++ b/realsense_ros_slam/CHANGELOG.rst @@ -0,0 +1,13 @@ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Changelog for package realsense_ros_slam +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +0.8.2 (2017-04-04) +------------------ +* Bug fixes and updates +* Contributors: Ben Hirashima, Curfman, Matthew C + +0.8.1 (2017-03-02) +------------------ +* Initial release +* Contributors: Ben Hirashima, Curfman, Matthew C diff --git a/realsense_ros_slam/CMakeLists.txt b/realsense_ros_slam/CMakeLists.txt new file mode 100644 index 0000000..96847da --- /dev/null +++ b/realsense_ros_slam/CMakeLists.txt @@ -0,0 +1,117 @@ +cmake_minimum_required(VERSION 2.8.3) 
+project(realsense_ros_slam) + +find_package(catkin REQUIRED COMPONENTS + nodelet + roscpp + rospy + sensor_msgs + std_msgs + geometry_msgs + image_transport + message_filters + message_generation + realsense_ros_camera + tf + tf2 + ) +find_package(Eigen3 REQUIRED) +find_package(OpenCV 3 REQUIRED) +set(CMAKE_CXX_FLAGS "-fPIE -fPIC -std=c++11 -O2 -D_FORTIFY_SOURCE=2 -fstack-protector -Wformat -Wformat-security -Wall ${CMAKE_CXX_FLAGS}") + +set(ROOT "${CMAKE_CURRENT_SOURCE_DIR}") +set(CMAKE_THREAD_LIBS_INIT pthread) + +add_message_files( + FILES + TrackingAccuracy.msg +) + +generate_messages( + DEPENDENCIES + sensor_msgs + std_msgs +) + +catkin_package( +# INCLUDE_DIRS include + LIBRARIES realsense_ros_slam + CATKIN_DEPENDS nodelet + roscpp + rospy + sensor_msgs + std_msgs + geometry_msgs + message_runtime +# DEPENDS system_lib +) + +include_directories( + ${catkin_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} + ${ROOT}/include + ${EIGEN3_INCLUDE_DIR} +) +file(GLOB SOURCES + src/slam_nodelet.cpp) + +add_library(${PROJECT_NAME} ${SOURCES}) + +set(LIBRARIES_TO_LINK + realsense + realsense_image + realsense_slam + SP_Core + tracker + ${CMAKE_THREAD_LIBS_INIT} +) +target_link_libraries(${PROJECT_NAME} + ${catkin_LIBRARIES} + ${LIBRARIES_TO_LINK} +) + +if (REALSENSE_ENABLE_TESTING) + find_package(rostest REQUIRED) + + catkin_download_test_data( + slam_room_1.bag + https://s3-eu-west-1.amazonaws.com/realsense-rostest-public/realsense_ros_slam/slam_room_1.bag + DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/tests + MD5 73200cdfbde1d83851e23f58d9fbd9ab) + + add_executable(tests_slam test/slam_test.cpp) + target_link_libraries(tests_slam + ${catkin_LIBRARIES} + ${GTEST_LIBRARIES} + ) + add_dependencies(tests_slam ${PROJECT_NAME}_generate_messages_cpp) + add_dependencies(tests_slam ${catkin_EXPORTED_TARGETS}) + add_dependencies(tests_slam slam_room_1.bag) +endif() + +# Install nodelet library +install(TARGETS ${PROJECT_NAME} + ARCHIVE DESTINATION 
${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +) + + +# Install launch files +install(DIRECTORY launch/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/launch +) + +# Install lib files +install(DIRECTORY lib/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/lib +) +# Install include files +install(DIRECTORY include/ + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/include +) + +# Install xml files +install(FILES nodelet_plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) diff --git a/realsense_ros_slam/README.md b/realsense_ros_slam/README.md new file mode 100644 index 0000000..de05f1f --- /dev/null +++ b/realsense_ros_slam/README.md @@ -0,0 +1,176 @@ +# ROS Node for Intel® RealSense™ SLAM Library + +This package contains a ROS wrapper for Intel's SLAM library. The realsense_ros_slam package provides a solution for SLAM as a ROS nodelet. It consumes the messages sent by the realsense_ros_camera nodelet, and publishes messages for the camera pose and occupancy map. + +## Hardware/Software Requirements + +To use realsense_ros_slam, you need a mobile agent with a RealSense ZR300 camera. 
+ +### Ubuntu requirements: +- Ubuntu 16.04 +- gcc 4.9.3 +- libeigen3-dev +- libc++ dev + +### Required Intel libraries: +- Linux SDK +- librealsense_slam +- Link: https://securewiki.ith.intel.com/pages/viewpage.action?pageId=510951805 + +## Inputs and Outputs + +### Subscribed Topics + +`camera/fisheye/image_raw` + +- Message type: `sensor_msgs::Image` +- The fisheye image with timestamp + +`camera/depth/image_raw` + +- Message type: `sensor_msgs::Image` +- The depth image with timestamp + +`camera/fisheye/camera_info` + +- Message type: `sensor_msgs::CameraInfo` +- The intrinsics of the fisheye camera + +`camera/depth/camera_info` + +- Message type: `sensor_msgs::CameraInfo` +- The intrinsics of the depth camera + +`camera/gyro/sample` + +- Message type: `sensor_msgs::Imu` +- Gyroscope sample with timestamp + +`camera/accel/sample` + +- Message type: `sensor_msgs::Imu` +- Accelerometer sample with timestamp + +`camera/gyro/imu_info` + +- Message type: `realsense_ros_camera::IMUInfo` +- Gyroscope intrinsics, noise and bias variances + +`camera/accel/imu_info` + +- Message type: `realsense_ros_camera::IMUInfo` +- Accelerometer intrinsics, noise and bias variances + +`camera/extrinsics/fisheye2imu` + +- Message type: `realsense_ros_camera::Extrinsics` +- Fisheye to IMU extrinsics + +`camera/extrinsics/fisheye2depth` + +- Message type: `realsense_ros_camera::Extrinsics` +- Fisheye to depth extrinsics + +### Published Topics + +`camera_pose` + +- Message type: `geometry_msgs::PoseStamped` +- The raw camera pose, in the camera's coordinate system (right-handed, +x right, +y down, +z forward) + +`reloc_pose` + +- Message type: `geometry_msgs::PoseStamped` +- The relocalized camera pose, in the camera's coordinate system. Published only when a relocalization has occurred. 
+ +`pose2d` + +- Message type: `geometry_msgs::Pose2D` +- The 2D camera pose, projected onto a plane corresponding to the occupancy map + +`tracking_accuracy` + +- Message type: `realsense_ros_slam::TrackingAccuracy` +- The current 6DoF tracking accuracy (low/medium/high/failed). Currently `high` is not used. + +`map` + +- Message type: `nav_msgs::OccupancyGrid` +- The occupancy map + +### Parameters + +`trajectoryFilename` + +- `str::string`, default: 'trajectory.ppm' +- The name of the trajectory file to output. The files will be saved in the realsense_ros_slam directory. + +`relocalizationFilename` + +- `str::string`, default: 'relocalization.bin' +- The name of re-localization file to output. The files will be saved in the realsense_ros_slam directory. + +`occupancyFilename` +- `str::string`, default: 'occupancy.bin' +- The name of occupancy data file to output. + +`map_resolution` +- `double` +- Sets the size of the grid squares in the occupancy map, in meters. + +`hoi_min` +- `double` +- Sets the minimum height of interest for the occupancy map, in meters. + +`hoi_max` +- `double` +- Sets the maximum height of interest for the occupancy map, in meters. + +`doi_min` +- `double` +- Sets the minimum depth of interest for the occupancy map, in meters. + +`doi_max` +- `double` +- Sets the maximum depth of interest for the occupancy map, in meters. + +## Usage + +To run the slam engine: +```bash +$ cd catkin_ws +$ source devel/setup.bash +$ roslaunch realsense_ros_slam demo_slam.launch +``` + +To run the slam engine using a recorded bag file: +```bash +$ roslaunch realsense_ros_slam demo_slam_from_bag.launch bag_path:=~/test.bag +``` + +To record slam topics to a bag file: +```bash +$ roslaunch realsense_ros_slam record_bag_slam.launch +``` +The bag file will be written to your home directory, with a timestamp appended. 
+ +To print the estimated pose messages, in another console window: +```bash +$ rostopic echo pose2d +``` + +The `demo_slam.launch` and `demo_slam_from_bag.launch` files will automatically start rviz using a configuration file located at `launch/demo_settings.rviz`. The raw camera pose, occupancy map, and odometry are shown in rviz. The odometry message is only sent by the SLAM nodelet for demo purposes, since the `pose2d` message cannot be displayed by rviz. The odometry message contains the same 2D pose that the `pose2d` message does. This shows where the robot is located relative to the occupancy map. + +## Testing + +The SLAM package can be tested with pre-recorded data using the provided ROS unit test. No physical camera needs to be present in order to run the test. The following steps can be used to build the unit test and download the pre-recorded ROS .bag data: + +```bash +$ cd ~/catkin_ws +$ catkin_make -DREALSENSE_ENABLE_TESTING=On +$ rostest realsense_ros_slam slam.test +``` + +You will see the test execute on the command line as it builds a map from the input data, then the test passes with a "RESULT: SUCCESS" status. + +**Note:** Depending on your internet connection speed, enabling 'REALSENSE_ENABLE_TESTING' can cause catkin_make to run for very long time (more than 5 minutes), as it downloads required pre-recorded .bag data. diff --git a/realsense_ros_slam/include/2DMapCommon.h b/realsense_ros_slam/include/2DMapCommon.h new file mode 100644 index 0000000..e222bf5 --- /dev/null +++ b/realsense_ros_slam/include/2DMapCommon.h @@ -0,0 +1,143 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once + +#include +#define INNAV_RENDER_DATA_INSIDE + +enum emNavStatus +{ + navInvalid, + navContinue, + navStay, + navNavigationFinished, + navNavigationFailed, + navExploreFinished, + navExploreFailed +}; + +typedef enum INNAV_TRACKING_ACCURACY : int +{ + innavLOW, + innavMED, + innavHIGH, + innavFAILED +} INNAV_TRACKING_ACCURACY; + +enum emNavMapPixel +{ + navUnobserved = 0, + navUnObservedOpenThreshold = 41, + navOpen = 80, + navOpenObstacleThreshold = 121, + navObstacle = 160, + navObstacleUncertainOrSmall = 180 +}; + +enum emMapUpdateStrategy +{ + innavNoUpdate, + innavReplace, + innavReplaceOnlyLargeObject, + innavAddOnlyLargeObject, + innavAdd, + innavClear +}; + +enum emCollisionAvoidanceCommand +{ + avoidCollisionNoNeed = 0, + avoidCollisionStop = 1, + avoidCollisionTurn = 2 +}; + +enum emRobotControlCommand +{ + robotcontrolStop = 0, + robotcontrolMoveAndTurn = 1, + robotcontrolTurnBody = 2, + robotcontrolTurnHead = 3, + robotcontrolIdle = 4 +}; + +typedef struct stRobotControlCommand +{ + emRobotControlCommand type; + float distance; + float dtheta; + // + float dpan; + float dtilt; + + float velocity_sugesstion; + + void Init() + { + type = robotcontrolIdle; + distance = 0.0f; + dtheta = 0.0f; + dpan = 0.0f; + dtilt = 0.0f; + + velocity_sugesstion = 1.0f; + } +} stRobotControlCommand; + +typedef struct stTrajTopology +{ + int hnext; + int vnext; + int node; + void Default() + { + node = -1; + hnext = -1; + vnext = -1; + } +} stTrajTopology; + +typedef struct stRobotPG +{ + INNAV_TRACKING_ACCURACY trackingAccuracy; + + float x; + float y; + float theta; + float pan; + float tilt; + emRobotControlCommand commandtype; + // + void initdefault() + { + trackingAccuracy = innavMED; + x = 0.0f; + y = 0.0f; + theta = 0.0f; + pan = 0.0f; + tilt = 0.0f; + commandtype = robotcontrolIdle; + } +} stRobotPG; + +typedef struct stTrajLeaf +{ + stRobotPG leafPG; + stRobotPG rootPG; + int rootID; + bool bLive; +} stTrajLeaf; + +float 
dis2D(CvPoint p1, CvPoint p2); +float dis2D(CvPoint2D32f p1, CvPoint2D32f p2); +CvPoint linelineIntersection(CvPoint P1, CvPoint P2, CvPoint P3, CvPoint P4); +float linelineAngle(CvPoint line1[2], CvPoint line2[2]); +float pointlineDis(CvPoint2D32f p1, CvPoint2D32f p2, CvPoint2D32f q); +//int min(int i, int j); +//int max(int i, int j); +void GetPixelofLine(CvPoint p1, CvPoint p2, std::vector &pts); +void GetPixelofLine(CvPoint p1, CvPoint p2, int w, int h, int thickness, std::vector &pts); +void GetPixelofLine(CvPoint p1, CvPoint p2, int thickness, std::vector &pts); +void RoundTheta(float &theta); +stRobotPG AddRobotPG(stRobotPG PG1, stRobotPG PG2); +stRobotPG SubRobotPG(stRobotPG PG1, stRobotPG PG2); diff --git a/realsense_ros_slam/include/ParamTypes.h b/realsense_ros_slam/include/ParamTypes.h new file mode 100644 index 0000000..9886bfb --- /dev/null +++ b/realsense_ros_slam/include/ParamTypes.h @@ -0,0 +1,134 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. 
All Rights Reserved + +#pragma once +#include "stdio.h" +typedef struct stCameraWorldParam +{ + float ffar; + float fnear; + float fFOV; + float fCameraHeight2Width; + + void Default() + { + ffar = 2.5f; + fnear = 0.3f; + fFOV = 58.9f; + fCameraHeight2Width = 9.0f / 16.0f; + + } +} stCameraParam; + +struct stCameraImageParam +{ + int width; + int height; + int fps; + void Init(int w, int h, int ifps) + { + width = w; + height = h; + fps = ifps; + } +}; + +struct stCameraIntrinsics +{ + unsigned int iImageWidth; + unsigned int iImageHeight; + float fFocalLengthHorizontal; + float fFocalLengthVertical; + float fPrincipalPointCoordU; + float fPrincipalPointCoordV; + + void Default() + { + iImageWidth = 320u; + iImageHeight = 240u; + fFocalLengthHorizontal = 316.f; + fFocalLengthVertical = 316.f; + fPrincipalPointCoordU = 160.f; + fPrincipalPointCoordV = 120.f; + } +}; + +typedef struct stMapInfo +{ + int w, h; + float fLengthPerPixel; + float x0, y0; + int ix0, iy0; + + void Default() + { + w = 960; + h = 960; + fLengthPerPixel = 0.02f; + x0 = 0.0f; + y0 = 0.0f; + ix0 = 960 / 2; + iy0 = 960 / 2; + } + void Set(int _w, int _h, float _fLengthPerPixel, float _x0, float _y0, int _ix0, int _iy0) + { + w = _w; + h = _h; + fLengthPerPixel = _fLengthPerPixel; + x0 = _x0; + y0 = _y0; + ix0 = _ix0; + iy0 = _iy0; + } + void WorldToMap(float fx, float fy, int &ix, int &iy) + { + ix = (fx - x0) / fLengthPerPixel + ix0; + iy = (fy - y0) / fLengthPerPixel + iy0; + } + void MapToWorld(int ix, int iy, float &fx, float &fy) + { + fx = (ix - ix0) * fLengthPerPixel + x0; + fy = (iy - iy0) * fLengthPerPixel + y0; + } +} stMapInfo; + +typedef struct stRobotParam +{ + float fRobotHorizontalScale; + float fMinHeightLowerThanCamera; + float fMaxHeightHigherThanCamera; + float fTolerateRadiusForNoise; + + void Default() + { + fRobotHorizontalScale = 0.6f; + fMinHeightLowerThanCamera = 0.4f; + fMaxHeightHigherThanCamera = 0.4f; + fTolerateRadiusForNoise = 0.1f; + } +} stRobotParam; + +#define 
THETA_SAMPLE_NUM 31 +#define THETA_SAMPLE_DTHETA 2.0f + +typedef struct stCollisionAvoidanceParam +{ + float fDis2ObstacleMustStop; + + void Default() + { + fDis2ObstacleMustStop = 0.5f; + } +} stCollisionAvoidanceParam; + +// FOR JNI +typedef struct stCollisionAvoidanceCommand +{ + int command; + float theta; + void Default() + { + command = 0; + theta = 0.0f; + } +} stCollisionAvoidanceCommand; \ No newline at end of file diff --git a/realsense_ros_slam/include/realsense_ros_slam/slam_nodelet.h b/realsense_ros_slam/include/realsense_ros_slam/slam_nodelet.h new file mode 100644 index 0000000..a50e601 --- /dev/null +++ b/realsense_ros_slam/include/realsense_ros_slam/slam_nodelet.h @@ -0,0 +1,94 @@ +// License: Apache 2.0. See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#pragma once +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ParamTypes.h" +#include "2DMapCommon.h" +#include +#include +#include +#include +#include +#include +#include +#include + +namespace realsense_ros_slam +{ +class SubscribeTopics +{ +public: + SubscribeTopics(); + ~SubscribeTopics(); + ros::NodeHandle l_nh; + rs::slam::slam* l_slam; + std::mutex mut_depth, mut_fisheye, mut_gyro_imu, mut_accel_imu; + std::vector data_list; + ros::Subscriber l_motion_gyro_sub, l_motion_accel_sub, l_fisheye_sub, l_depth_sub; + void subscribeStreamMessages(); + void subscribeMotion(); + void getStreamSample(const sensor_msgs::ImageConstPtr &imageMsg, rs::core::stream_type stream); + void depthMessageCallback(const sensor_msgs::ImageConstPtr &depthImageMsg); + void fisheyeMessageCallback(const sensor_msgs::ImageConstPtr &fisheyeImageMsg); + void motionGyroCallback(const sensor_msgs::ImuConstPtr &imuMsg); + void motionAccelCallback(const sensor_msgs::ImuConstPtr 
&imuMsg); + void getMotionSample(const sensor_msgs::ImuConstPtr &imuMsg, rs::core::motion_type motionType); + void onInit(ros::NodeHandle & nh, rs::slam::slam * slam); +}; + +class SNodeletSlam: public nodelet::Nodelet +{ +public: + SNodeletSlam(); + ~SNodeletSlam(); + ros::NodeHandle nh; + ros::Subscriber sub_depthInfo, sub_fisheyeInfo, sub_accelInfo, sub_gyroInfo, sub_fe2imu, sub_fe2depth; + rs::core::video_module_interface::supported_module_config supported_config; + rs::core::video_module_interface::actual_module_config actual_config; + SubscribeTopics sub; + + void onInit()override; +private: + sensor_msgs::CameraInfoConstPtr depthCameraInfo_, fisheyeCameraInfo_; + realsense_ros_camera::IMUInfoConstPtr accelInfo_, gyroInfo_; + realsense_ros_camera::ExtrinsicsConstPtr fe2imu_, fe2depth_; + std::mutex mut_init; + void depthInfoCallback(const sensor_msgs::CameraInfoConstPtr& depthCameraInfo); + void fisheyeInfoCallback(const sensor_msgs::CameraInfoConstPtr& fisheyeCameraInfo); + void accelInfoCallback(const realsense_ros_camera::IMUInfoConstPtr& accelInfo); + void gyroInfoCallback(const realsense_ros_camera::IMUInfoConstPtr& gyroInfo); + void fe2ImuCallback(const realsense_ros_camera::ExtrinsicsConstPtr& fe2imu); + void fe2depthCallback(const realsense_ros_camera::ExtrinsicsConstPtr& fe2depth); + void startIfReady(); + void startSlam(); + void setCalibrationData(const sensor_msgs::CameraInfoConstPtr & cameraInfoMsg, rs::core::intrinsics & cameraInfo); + void setStreamConfigIntrin(rs::core::stream_type stream, std::map< rs::core::stream_type, rs::core::intrinsics > intrinsics); + void setMotionData(realsense_ros_camera::IMUInfo& imu_res, rs::core::motion_device_intrinsics& motion_intrin); + void setMotionConfigIntrin(rs::core::motion_type motion, std::map< rs::core::motion_type, rs::core::motion_device_intrinsics > motion_intrinsics); + void setExtrinData(realsense_ros_camera::Extrinsics& fe_res, rs::core::extrinsics& extrinsics); +};//end class +} diff --git 
a/realsense_ros_slam/launch/demo_settings.rviz b/realsense_ros_slam/launch/demo_settings.rviz new file mode 100644 index 0000000..d1beeac --- /dev/null +++ b/realsense_ros_slam/launch/demo_settings.rviz @@ -0,0 +1,147 @@ +Panels: + - Class: rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - /Map1 + - /Odometry1 + Splitter Ratio: 0.495712012 + Tree Height: 1153 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.588679016 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Experimental: false + Name: Time + SyncMode: 0 + SyncSource: "" +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.0299999993 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.100000001 + Class: rviz/Pose + Color: 255; 25; 0 + Enabled: true + Head Length: 0.300000012 + Head Radius: 0.100000001 + Name: Pose + Shaft Length: 1 + Shaft Radius: 0.0500000007 + Shape: Axes + Topic: /camera_pose + Unreliable: false + Value: true + - Alpha: 0.699999988 + Class: rviz/Map + Color Scheme: map + Draw Behind: false + Enabled: true + Name: Map + Topic: /map + Unreliable: false + Value: true + - Angle Tolerance: 0.100000001 + Class: rviz/Odometry + Color: 255; 25; 0 + Enabled: true + Keep: 100 + Length: 0.5 + Name: Odometry + Position Tolerance: 0.100000001 + Topic: /odom + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Fixed Frame: map + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + 
- Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Topic: /initialpose + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 14.6014929 + Enable Stereo Rendering: + Stereo Eye Separation: 0.0599999987 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.0500000007 + Name: Current View + Near Clip Distance: 0.00999999978 + Pitch: 1.56979632 + Target Frame: + Value: Orbit (rviz) + Yaw: 3.86857152 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 1487 + Hide Left Dock: false + Hide Right Dock: false + QMainWindow State: 000000ff00000000fd0000000400000000000001b000000522fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006b00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003400000522000000ef00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000015c00000522fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003400000522000000cd00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700
000003000008260000004afc0100000002fb0000000800540069006d00650100000000000008260000043900fffffffb0000000800540069006d00650100000000000004500000000000000000000005080000052200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 2086 + X: 98 + Y: 36 diff --git a/realsense_ros_slam/launch/demo_slam.launch b/realsense_ros_slam/launch/demo_slam.launch new file mode 100644 index 0000000..42278a9 --- /dev/null +++ b/realsense_ros_slam/launch/demo_slam.launch @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/realsense_ros_slam/launch/demo_slam_from_bag.launch b/realsense_ros_slam/launch/demo_slam_from_bag.launch new file mode 100644 index 0000000..68772d4 --- /dev/null +++ b/realsense_ros_slam/launch/demo_slam_from_bag.launch @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/realsense_ros_slam/launch/params.yaml b/realsense_ros_slam/launch/params.yaml new file mode 100644 index 0000000..9a8a08d --- /dev/null +++ b/realsense_ros_slam/launch/params.yaml @@ -0,0 +1,5 @@ +resolution: 0.1 +trajectoryFilename: 'trajectory_1019.ppm' +relocalizationFilename: 'relocalization_1019.bin' +occupancyFilename: 'occupancy_1019.bin' + diff --git a/realsense_ros_slam/launch/record_bag_slam.launch b/realsense_ros_slam/launch/record_bag_slam.launch new file mode 100644 index 0000000..f50e55c --- /dev/null +++ b/realsense_ros_slam/launch/record_bag_slam.launch @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_slam/launch/run_from_bag.launch b/realsense_ros_slam/launch/run_from_bag.launch new file mode 100644 index 0000000..60fbe3e --- /dev/null +++ b/realsense_ros_slam/launch/run_from_bag.launch @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/realsense_ros_slam/launch/setup_demo.launch b/realsense_ros_slam/launch/setup_demo.launch new file mode 100644 index 0000000..a66ff34 
--- /dev/null +++ b/realsense_ros_slam/launch/setup_demo.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/realsense_ros_slam/launch/slam_nodelet.launch b/realsense_ros_slam/launch/slam_nodelet.launch new file mode 100644 index 0000000..15522fb --- /dev/null +++ b/realsense_ros_slam/launch/slam_nodelet.launch @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/realsense_ros_slam/msg/TrackingAccuracy.msg b/realsense_ros_slam/msg/TrackingAccuracy.msg new file mode 100644 index 0000000..9400187 --- /dev/null +++ b/realsense_ros_slam/msg/TrackingAccuracy.msg @@ -0,0 +1,2 @@ +std_msgs/Header header +uint32 tracking_accuracy diff --git a/realsense_ros_slam/nodelet_plugins.xml b/realsense_ros_slam/nodelet_plugins.xml new file mode 100644 index 0000000..1472146 --- /dev/null +++ b/realsense_ros_slam/nodelet_plugins.xml @@ -0,0 +1,7 @@ + + + + Intel(R) RealSense(TM) SLAM nodelet. + + + diff --git a/realsense_ros_slam/occupancy_1019.bin b/realsense_ros_slam/occupancy_1019.bin new file mode 100644 index 0000000..a3609b9 Binary files /dev/null and b/realsense_ros_slam/occupancy_1019.bin differ diff --git a/realsense_ros_slam/package.xml b/realsense_ros_slam/package.xml new file mode 100644 index 0000000..5a0b90a --- /dev/null +++ b/realsense_ros_slam/package.xml @@ -0,0 +1,34 @@ + + + realsense_ros_slam + 0.8.2 + The realsense_ros_slam package + Intel RealSense + Apache 2.0 + catkin + nodelet + roscpp + rospy + sensor_msgs + std_msgs + geometry_msgs + nav_msgs + realsense_ros_camera + message_filters + image_transport + message_generation + message_runtime + message_filters + image_transport + realsense_ros_camera + nav_msgs + geometry_msgs + nodelet + roscpp + rospy + sensor_msgs + std_msgs + + + + diff --git a/realsense_ros_slam/relocalization_1019.bin b/realsense_ros_slam/relocalization_1019.bin new file mode 100644 index 0000000..24f2a94 Binary files /dev/null and b/realsense_ros_slam/relocalization_1019.bin differ 
diff --git a/realsense_ros_slam/rviz/rviz_slam.rviz b/realsense_ros_slam/rviz/rviz_slam.rviz new file mode 100644 index 0000000..ae5200a --- /dev/null +++ b/realsense_ros_slam/rviz/rviz_slam.rviz @@ -0,0 +1,161 @@ +Panels: + - Class: rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + Splitter Ratio: 0.5 + Tree Height: 387 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.588679016 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Experimental: false + Name: Time + SyncMode: 0 + SyncSource: Image +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.0299999993 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.100000001 + Class: rviz/Pose + Color: 0; 0; 255 + Enabled: true + Head Length: 0.300000012 + Head Radius: 0.25 + Name: Pose + Shaft Length: 1 + Shaft Radius: 0.125 + Shape: Arrow + Topic: /camera_pose + Unreliable: false + Value: true + - Alpha: 0.699999988 + Class: rviz/Map + Color Scheme: map + Draw Behind: false + Enabled: true + Name: Map + Topic: /map + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /camera/fisheye/image_raw + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /camera/depth/image_raw + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: 
false + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Fixed Frame: map + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Topic: /initialpose + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 6.13056374 + Enable Stereo Rendering: + Stereo Eye Separation: 0.0599999987 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.0500000007 + Name: Current View + Near Clip Distance: 0.00999999978 + Pitch: 0.785398006 + Target Frame: + Value: Orbit (rviz) + Yaw: 0.785398006 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 668 + Hide Left Dock: false + Hide Right Dock: false + Image: + collapsed: false + QMainWindow State: 
000000ff00000000fd00000004000000000000016a00000212fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000212000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000016100000212fc0200000005fb0000000a0049006d0061006700650100000028000001060000001600fffffffb0000000a0049006d0061006700650100000134000001060000001600fffffffb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000212000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b00000003efc0100000002fb0000000800540069006d00650100000000000004b00000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000001d90000021200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1200 + X: 65 + Y: 24 diff --git a/realsense_ros_slam/src/slam_nodelet.cpp b/realsense_ros_slam/src/slam_nodelet.cpp new file mode 100644 index 0000000..4685164 --- /dev/null +++ b/realsense_ros_slam/src/slam_nodelet.cpp @@ -0,0 +1,733 @@ +// License: Apache 2.0. 
See LICENSE file in root directory. +// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include "realsense_ros_slam/slam_nodelet.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +PLUGINLIB_EXPORT_CLASS(realsense_ros_slam::SNodeletSlam, nodelet::Nodelet) + +uint64_t seq_depth = 0, seq_fisheye = 0; +cv::Mat image_depth[100], image_fisheye[100]; +uint64_t cpt_depth = 0, cpt_fisheye = 0; +rs::core::image_info info_fisheye[100], info_depth[100]; + +std::string pkgpath; +std::string trajectoryFilename; +std::string relocalizationFilename; +std::string occupancyFilename; +std::string topic_camera_pose, topic_reloc_pose, topic_pose2d, topic_map, topic_tracking_accuracy, topic_odom; +double map_resolution, hoi_min, hoi_max, doi_min, doi_max; +bool is_pub_odom = false; + +ros::Publisher pub_pose2d, pub_pose, pub_map, pub_accuracy, pub_reloc, pub_odom; +geometry_msgs::Pose2D pose2d; + +IplImage * ipNavMap = NULL; +std::vector< stRobotPG > g_robotPGStack; +std::vector< CvPoint > g_relocalizationPointStack; + +namespace realsense_ros_slam +{ + +SubscribeTopics::SubscribeTopics() +{ +} + + +SubscribeTopics::~SubscribeTopics() +{ +} + + +void SubscribeTopics::onInit(ros::NodeHandle & nh, rs::slam::slam * slam) +{ + l_nh = nh; + l_slam = slam; +} + +void SubscribeTopics::subscribeStreamMessages() +{ + std::string depthImageStream = "camera/depth/image_raw"; + std::string fisheyeImageStream = "camera/fisheye/image_raw"; + ROS_INFO_STREAM("Listening on " << depthImageStream); + ROS_INFO_STREAM("Listening on " << fisheyeImageStream); + l_depth_sub = l_nh.subscribe(depthImageStream, 100, & SubscribeTopics::depthMessageCallback, this); + l_fisheye_sub = l_nh.subscribe(fisheyeImageStream, 100, & SubscribeTopics::fisheyeMessageCallback, this); +} + +void SubscribeTopics::subscribeMotion() +{ + std::string motionInfo_gyro = "camera/gyro/sample"; + std::string motionInfo_accel = "camera/accel/sample"; + 
ROS_INFO_STREAM("Listening on " << motionInfo_gyro); + ROS_INFO_STREAM("Listening on " << motionInfo_accel); + l_motion_gyro_sub = l_nh.subscribe(motionInfo_gyro, 10000, & SubscribeTopics::motionGyroCallback, this); + l_motion_accel_sub = l_nh.subscribe(motionInfo_accel, 10000, & SubscribeTopics::motionAccelCallback, this); +} + +void SubscribeTopics::depthMessageCallback(const sensor_msgs::ImageConstPtr &depthImageMsg) +{ + mut_depth.lock(); + SubscribeTopics::getStreamSample(depthImageMsg, rs::core::stream_type::depth); + mut_depth.unlock(); +} + + +void SubscribeTopics::fisheyeMessageCallback(const sensor_msgs::ImageConstPtr &fisheyeImageMsg) +{ + mut_fisheye.lock(); + SubscribeTopics::getStreamSample(fisheyeImageMsg, rs::core::stream_type::fisheye); + mut_fisheye.unlock(); +} + +void SubscribeTopics::getStreamSample(const sensor_msgs::ImageConstPtr &imageMsg, rs::core::stream_type stream) +{ + int width = imageMsg->width; + int height = imageMsg->height; + rs::core::correlated_sample_set sample_set = {}; + if (stream == rs::core::stream_type::fisheye) + { + info_fisheye[cpt_fisheye] = + { + width, + height, + rs::utils::convert_pixel_format(rs::format::raw8), + 1 * width + }; + image_fisheye[cpt_fisheye] = cv::Mat(height, width, CV_8UC1, (unsigned char*)imageMsg->data.data()).clone(); + sample_set[stream] = rs::core::image_interface::create_instance_from_raw_data( + & info_fisheye[cpt_fisheye], + image_fisheye[cpt_fisheye].data, + stream, + rs::core::image_interface::flag::any, + imageMsg->header.stamp.toSec(), + (uint64_t)imageMsg->header.seq, + rs::core::timestamp_domain::microcontroller + ); + if (cpt_fisheye < 99) + { + cpt_fisheye++; + } + else + { + cpt_fisheye = 0; + } + } + else + { + info_depth[cpt_depth] = + { + width, + height, + rs::utils::convert_pixel_format(rs::format::z16), + 2 * width + }; + image_depth[cpt_depth] = cv::Mat(height, width, CV_16UC1, (unsigned char*)imageMsg->data.data()).clone(); + sample_set[stream] = 
rs::core::image_interface::create_instance_from_raw_data( + & info_depth[cpt_depth], + image_depth[cpt_depth].data, + stream, + rs::core::image_interface::flag::any, + imageMsg->header.stamp.toSec(), + (uint64_t)imageMsg->header.seq, + rs::core::timestamp_domain::microcontroller + ); + if (cpt_depth < 99) + { + cpt_depth++; + } + else + { + cpt_depth = 0; + } + } + if (l_slam->process_sample_set(sample_set) < rs::core::status_no_error) + { + ROS_ERROR("error: failed to process sample"); + } + sample_set[stream]->release(); +}//end of getStreamSample + + +void SubscribeTopics::motionGyroCallback(const sensor_msgs::ImuConstPtr &imuMsg) +{ + mut_gyro_imu.lock(); + SubscribeTopics::getMotionSample(imuMsg, rs::core::motion_type::gyro); + mut_gyro_imu.unlock(); +} + + +void SubscribeTopics::motionAccelCallback(const sensor_msgs::ImuConstPtr &imuMsg) +{ + mut_accel_imu.lock(); + SubscribeTopics::getMotionSample(imuMsg, rs::core::motion_type::accel); + mut_accel_imu.unlock(); +} + + +void SubscribeTopics::getMotionSample(const sensor_msgs::ImuConstPtr &imuMsg, rs::core::motion_type motionType) +{ + rs::core::correlated_sample_set sample_set = {}; + sample_set[motionType].timestamp = imuMsg->header.stamp.toSec(); + sample_set[motionType].type = motionType; + sample_set[motionType].frame_number = imuMsg->header.seq; + if (motionType == rs::core::motion_type::accel) + { + sample_set[motionType].data[0] = (float)imuMsg->linear_acceleration.x; + sample_set[motionType].data[1] = (float)imuMsg->linear_acceleration.y; + sample_set[motionType].data[2] = (float)imuMsg->linear_acceleration.z; + } + else if (motionType == rs::core::motion_type::gyro) + { + sample_set[motionType].data[0] = (float)imuMsg->angular_velocity.x; + sample_set[motionType].data[1] = (float)imuMsg->angular_velocity.y; + sample_set[motionType].data[2] = (float)imuMsg->angular_velocity.z; + } + + if (l_slam->process_sample_set(sample_set) < rs::core::status_no_error) + { + ROS_ERROR("error: failed to process 
sample"); + } +} + + +class slam_tracking_event_handler : public rs::slam::tracking_event_handler +{ +public: + slam_tracking_event_handler() + { + ROS_INFO("tracking_event_handler"); + } + + + void on_restart() + { + ROS_INFO("Restart--------------------------------"); + fflush(stdout); + } + + + ~slam_tracking_event_handler() + { + ROS_INFO("deconstructure"); + } +}; + + +void convertToPG(rs::slam::PoseMatrix4f & pose, Eigen::Vector3f & gravity, stRobotPG & robotPG) +{ + Eigen::Vector3f g = gravity; + Eigen::Vector3f g_startG(0.0f, 1.0f, 0.0f); + Eigen::Vector3f x3D(1.0f, 0.0f, 0.0f); + Eigen::Vector3f y3D(0.0f, 1.0f, 0.0f); + Eigen::Vector3f z3D(0.0f, 0.0f, 1.0f); + Eigen::Vector3f x3D_(pose.at(0, 0), pose.at(0, 1), pose.at(0, 2)); + Eigen::Vector3f y3D_(pose.at(1, 0), pose.at(1, 1), pose.at(1, 2)); + Eigen::Vector3f z3D_(pose.at(2, 0), pose.at(2, 1), pose.at(2, 2)); + Eigen::Vector3f z3DTo_(z3D.dot(x3D_), z3D.dot(y3D_), z3D.dot(z3D_)); + Eigen::Vector3f ProjX = g.cross(z3D); + Eigen::Vector3f ProjY = ProjX.cross(g); + Eigen::Vector2f v(z3DTo_.dot(ProjX), z3DTo_.dot(ProjY)); + Eigen::Vector2f v_(z3D.dot(ProjX), z3D.dot(ProjY)); + float theta1 = atan2(v.x(), v.y()); + float theta2 = atan2(v_.x(), v_.y()); + float theta = theta2 - theta1; + theta += CV_PI / 2; // Needed to point the right direction + if (theta < 0) + { + theta += 2 * CV_PI; + } + else if (theta >= 2 * CV_PI) + { + theta -= 2 * CV_PI; + } + Eigen::Vector3f origin_(pose.at(0, 3), pose.at(1, 3), pose.at(2, 3)); + Eigen::Vector3f ProjX0 = g_startG.cross(z3D); + Eigen::Vector3f ProjY0 = ProjX0.cross(g_startG); + float x = origin_.dot(ProjX0); + float y = origin_.dot(ProjY0); + robotPG.initdefault(); + robotPG.theta = theta; + robotPG.x = x; + robotPG.y = y; +} + + +void extrinsicsMsgToRSExtrinsics(const realsense_ros_camera::ExtrinsicsConstPtr &extrinsicsMsg, rs::core::extrinsics &rsExtrinsics) +{ + for (int i = 0; i < 9; ++i) + { + rsExtrinsics.rotation[i] = extrinsicsMsg->rotation[i]; + if (i < 3) 
rsExtrinsics.translation[i] = extrinsicsMsg->translation[i]; + } +} + +void imuInfoMsgToRSImuIntrinsics(const realsense_ros_camera::IMUInfoConstPtr &imuInfoMsg, rs::core::motion_device_intrinsics &rsImuIntrinsics) +{ + int index = 0; + for (int i = 0; i < 3; ++i) + { + for (int j = 0; j < 4; ++j) + { + rsImuIntrinsics.data[i][j] = imuInfoMsg->data[index]; + ++index; + } + rsImuIntrinsics.noise_variances[i] = imuInfoMsg->noise_variances[i]; + rsImuIntrinsics.bias_variances[i] = imuInfoMsg->bias_variances[i]; + } +} + +tf2::Quaternion quaternionFromPoseMatrix(rs::slam::PoseMatrix4f cameraPose) +{ + tf2::Matrix3x3 rotMat = tf2::Matrix3x3( + cameraPose.at(0, 0), + cameraPose.at(0, 1), + cameraPose.at(0, 2), + cameraPose.at(1, 0), + cameraPose.at(1, 1), + cameraPose.at(1, 2), + cameraPose.at(2, 0), + cameraPose.at(2, 1), + cameraPose.at(2, 2) + ); + + tf2::Quaternion quat; + rotMat.getRotation(quat); + return quat; +} + +geometry_msgs::Pose poseMatrixToMsg(rs::slam::PoseMatrix4f camera_pose) +{ + tf2::Quaternion quat = quaternionFromPoseMatrix(camera_pose); + geometry_msgs::Quaternion quat_msg; + tf2::convert(quat, quat_msg); + + geometry_msgs::Point point_msg; + point_msg.x = camera_pose.at(0, 3); + point_msg.y = camera_pose.at(1, 3); + point_msg.z = camera_pose.at(2, 3); + + geometry_msgs::Pose pose_msg; + pose_msg.orientation = quat_msg; + pose_msg.position = point_msg; + return pose_msg; +} + +geometry_msgs::PoseStamped getPoseStampedMsg(rs::slam::PoseMatrix4f cameraPose, uint64_t frameNum, double timestamp_ms) +{ + std_msgs::Header header; + header.stamp = ros::Time(timestamp_ms / 1000); + header.frame_id = "camera_link"; + if (frameNum > INT32_MAX) + { + header.seq = frameNum - INT32_MAX; + } + else + { + header.seq = frameNum; + } + + geometry_msgs::PoseStamped pose_stamped_msg; + pose_stamped_msg.header = header; + pose_stamped_msg.pose = poseMatrixToMsg(cameraPose); + return pose_stamped_msg; +} + +class slam_event_handler : public 
rs::core::video_module_interface::processing_event_handler +{ +public: + std::shared_ptr< rs::slam::occupancy_map > occ_map; + slam_event_handler() + { + std::cout << "created.........." << std::endl; + } + + void module_output_ready(rs::core::video_module_interface * sender, rs::core::correlated_sample_set * sample) + { + rs::slam::slam *slamPtr = dynamic_cast< rs::slam::slam * >(sender); + uint64_t feFrameNum = sample->images[static_cast(rs::core::stream_type::fisheye)]->query_frame_number(); + double feTimeStamp = sample->images[static_cast(rs::core::stream_type::fisheye)]->query_time_stamp(); + + // Publish camera pose + rs::slam::PoseMatrix4f cameraPose; + slamPtr->get_camera_pose(cameraPose); + geometry_msgs::PoseStamped pose_msg = getPoseStampedMsg(cameraPose, feFrameNum, feTimeStamp); + pub_pose.publish(pose_msg); + + // Publish relocalized camera pose, if any + rs::slam::PoseMatrix4f relocPose; + if (slamPtr->get_relocalization_pose(relocPose)) + { + geometry_msgs::PoseStamped reloc_pose_msg = getPoseStampedMsg(relocPose, feFrameNum, feTimeStamp); + pub_reloc.publish(reloc_pose_msg); + } + + // Publish tracking accuracy + rs::slam::tracking_accuracy accuracy = slamPtr->get_tracking_accuracy(); + TrackingAccuracy accuracyMsg; + accuracyMsg.header.stamp = ros::Time(feTimeStamp); + accuracyMsg.header.seq = feFrameNum; + accuracyMsg.tracking_accuracy = (uint32_t)accuracy; + pub_accuracy.publish(accuracyMsg); + + // Publish 2D pose + Eigen::Vector3f gravity = Eigen::Vector3f(0, 1, 0); + stRobotPG robotPG; + convertToPG(cameraPose, gravity, robotPG); + pose2d.x = robotPG.x; + pose2d.y = robotPG.y; + pose2d.theta = robotPG.theta; + pub_pose2d.publish(pose2d); + g_robotPGStack.push_back(robotPG); + + // Publish odometry + if (is_pub_odom) + { + nav_msgs::Odometry odom; + odom.header.stamp = ros::Time(feTimeStamp); + odom.header.seq = feFrameNum; + odom.header.frame_id = "odom"; + odom.pose.pose.position.x = pose2d.x; + odom.pose.pose.position.y = pose2d.y; + 
tf2::Quaternion quat(tf2::Vector3(0, 0, 1), pose2d.theta); // Rotate around the z axis by angle theta + tf2::convert(quat, odom.pose.pose.orientation); + pub_odom.publish(odom); + } + + // Publish occupancy map + int wmap = 512; + int hmap = 512; + if (!occ_map) + { + occ_map = slamPtr->create_occupancy_map(wmap * hmap); + std::cout << " creating occupancy map: resolution: " << slamPtr->get_occupancy_map_resolution() << std::endl; + } + if (ipNavMap == NULL) + { + ipNavMap = cvCreateImage(cvSize(wmap, hmap), 8, 1); + cvSet(ipNavMap, 10, NULL); + } + int status = slamPtr->get_occupancy_map_update(occ_map); + int count = occ_map->get_tile_count(); + nav_msgs::OccupancyGrid map_msg; + if (status >= 0 && count > 0) + { + const int32_t* map = occ_map->get_tile_coordinates(); + for (int i = 0; i < count; i++) + { + int _x = map[3 * i] + wmap / 2; + int _y = map[3 * i + 1] + hmap / 2; + if (_x >= 0 && _x < wmap) + { + if (_y >= 0 && _y < hmap) + { + int V = map[3 * i + 2]; + ipNavMap->imageData[wmap * _y + _x] = V; + } + } + } + } + std::vector vMap(ipNavMap->imageData, ipNavMap->imageData + wmap * hmap); + map_msg.data = vMap; + map_msg.info.resolution = map_resolution; + map_msg.info.width = wmap; + map_msg.info.height = hmap; + map_msg.info.origin.position.x = -(wmap / 2) * map_resolution; + map_msg.info.origin.position.y = -(hmap / 2) * map_resolution; + pub_map.publish(map_msg); + } + + ~slam_event_handler() + { + ROS_INFO("deconstructure event handler"); + } +}; + + +SNodeletSlam::SNodeletSlam() +{ +} + + +SNodeletSlam::~SNodeletSlam() +{ +} + + +void SNodeletSlam::onInit() +{ + ros::NodeHandle pnh = getPrivateNodeHandle(); + pnh.param< double >("map_resolution", map_resolution, 0.05); + pnh.param< double >("hoi_min", hoi_min, -0.5); + pnh.param< double >("hoi_max", hoi_max, 0.1); + pnh.param< double >("doi_min", doi_min, 0.3); + pnh.param< double >("doi_max", doi_max, 3.0); + pnh.param< std::string >("trajectoryFilename", trajectoryFilename, "trajectory.ppm"); + 
pnh.param< std::string >("relocalizationFilename", relocalizationFilename, "relocalization.bin"); + pnh.param< std::string >("occupancyFilename", occupancyFilename, "occupancy.bin"); + pnh.param< std::string >("topic_camera_pose", topic_camera_pose, "camera_pose"); + pnh.param< std::string >("topic_reloc_pose", topic_reloc_pose, "reloc_pose"); + pnh.param< std::string >("topic_pose2d", topic_pose2d, "pose2d"); + pnh.param< std::string >("topic_map", topic_map, "map"); + pnh.param< std::string >("topic_tracking_accuracy", topic_tracking_accuracy, "tracking_accuracy"); + pnh.param< std::string >("topic_odom", topic_odom, "odom"); + pnh.param< bool >("publish_odometry", is_pub_odom, false); + + nh = getMTNodeHandle(); + pub_pose = nh.advertise< geometry_msgs::PoseStamped >(topic_camera_pose, 1, true); + pub_reloc = nh.advertise< geometry_msgs::PoseStamped >(topic_reloc_pose, 1, true); + pub_pose2d = nh.advertise< geometry_msgs::Pose2D >(topic_pose2d, 2, true); + pub_map = nh.advertise< nav_msgs::OccupancyGrid >(topic_map, 1, true); + pub_accuracy = nh.advertise< realsense_ros_slam::TrackingAccuracy >(topic_tracking_accuracy, 1, true); + if (is_pub_odom) pub_odom = nh.advertise< nav_msgs::Odometry >(topic_odom, 1, true); + pkgpath = ros::package::getPath("realsense_ros_slam") + "/"; + + sub_depthInfo = nh.subscribe("camera/depth/camera_info", 1, &SNodeletSlam::depthInfoCallback, this); + sub_fisheyeInfo = nh.subscribe("camera/fisheye/camera_info", 1, &SNodeletSlam::fisheyeInfoCallback, this); + sub_accelInfo = nh.subscribe("camera/accel/imu_info", 1, &SNodeletSlam::accelInfoCallback, this); + sub_gyroInfo = nh.subscribe("camera/gyro/imu_info", 1, &SNodeletSlam::gyroInfoCallback, this); + sub_fe2imu = nh.subscribe("camera/extrinsics/fisheye2imu", 1, &SNodeletSlam::fe2ImuCallback, this); + sub_fe2depth = nh.subscribe("camera/extrinsics/fisheye2depth", 1, &SNodeletSlam::fe2depthCallback, this); + + actual_config = {}; + ROS_INFO("end of onInit"); +}//end onInit + +void 
SNodeletSlam::depthInfoCallback(const sensor_msgs::CameraInfoConstPtr& depthCameraInfo) +{ + sub_depthInfo.shutdown(); + mut_init.lock(); + depthCameraInfo_ = depthCameraInfo; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::fisheyeInfoCallback(const sensor_msgs::CameraInfoConstPtr& fisheyeCameraInfo) +{ + sub_fisheyeInfo.shutdown(); + mut_init.lock(); + fisheyeCameraInfo_ = fisheyeCameraInfo; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::accelInfoCallback(const realsense_ros_camera::IMUInfoConstPtr& accelInfo) +{ + sub_accelInfo.shutdown(); + mut_init.lock(); + accelInfo_ = accelInfo; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::gyroInfoCallback(const realsense_ros_camera::IMUInfoConstPtr& gyroInfo) +{ + sub_gyroInfo.shutdown(); + mut_init.lock(); + gyroInfo_ = gyroInfo; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::fe2ImuCallback(const realsense_ros_camera::ExtrinsicsConstPtr& fe2imu) +{ + sub_fe2imu.shutdown(); + mut_init.lock(); + fe2imu_ = fe2imu; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::fe2depthCallback(const realsense_ros_camera::ExtrinsicsConstPtr& fe2depth) +{ + sub_fe2depth.shutdown(); + mut_init.lock(); + fe2depth_ = fe2depth; + startIfReady(); + mut_init.unlock(); +} + +void SNodeletSlam::startIfReady() +{ + // If we have all the messages we were waiting for, start SLAM. 
+ if (depthCameraInfo_ && fisheyeCameraInfo_ && accelInfo_ && gyroInfo_ && fe2imu_ && fe2depth_) + { + startSlam(); + } +} + +void SNodeletSlam::startSlam() +{ + ROS_INFO("Staring SLAM..."); + + std::unique_ptr slam(new rs::slam::slam()); + slam->set_occupancy_map_resolution(map_resolution); + slam->set_occupancy_map_height_of_interest(hoi_min, hoi_max); + slam->set_occupancy_map_depth_of_interest(doi_min, doi_max); + slam->force_relocalization_pose(false); + + slam_event_handler scenePerceptionEventHandler; + slam->register_event_handler(&scenePerceptionEventHandler); + + slam_tracking_event_handler trackingEventHandler; + slam->register_tracking_event_handler(&trackingEventHandler); + + supported_config = {}; + if (slam->query_supported_module_config(0, supported_config) < rs::core::status_no_error) + { + std::cerr << "error : failed to query the first supported module configuration" << std::endl; + return ; + } + + // Set camera intrinsics + rs::core::intrinsics depth_intrinsics, fisheye_intrinsics; + SNodeletSlam::setCalibrationData(depthCameraInfo_, depth_intrinsics); + SNodeletSlam::setCalibrationData(fisheyeCameraInfo_, fisheye_intrinsics); + + std::map< rs::core::stream_type, rs::core::intrinsics > intrinsics; + intrinsics[rs::core::stream_type::depth] = depth_intrinsics; + intrinsics[rs::core::stream_type::fisheye] = fisheye_intrinsics; + SNodeletSlam::setStreamConfigIntrin(rs::core::stream_type::depth, intrinsics); + SNodeletSlam::setStreamConfigIntrin(rs::core::stream_type::fisheye, intrinsics); + + // Set IMU intrinsics + rs::core::motion_device_intrinsics acc, gyro; + imuInfoMsgToRSImuIntrinsics(accelInfo_, acc); + imuInfoMsgToRSImuIntrinsics(gyroInfo_, gyro); + actual_config[rs::core::motion_type::accel].is_enabled = true; + actual_config[rs::core::motion_type::accel].intrinsics = acc; + actual_config[rs::core::motion_type::gyro].is_enabled = true; + actual_config[rs::core::motion_type::gyro].intrinsics = gyro; + + // Set extrinsics + 
rs::core::extrinsics fe2imu, fe2depth; + extrinsicsMsgToRSExtrinsics(fe2imu_, fe2imu); + extrinsicsMsgToRSExtrinsics(fe2depth_, fe2depth); + actual_config[rs::core::stream_type::fisheye].extrinsics_motion = fe2imu; + actual_config[rs::core::stream_type::fisheye].extrinsics = fe2depth; + + // Set actual config + if (slam->set_module_config(actual_config) < rs::core::status_no_error) + { + NODELET_ERROR("error : failed to set the enabled module configuration"); + return ; + } + + //slam->load_relocalization_map(relocalizationFilename); + sub.onInit(nh, slam.get()); + sub.subscribeStreamMessages(); + sub.subscribeMotion(); + + ros::Rate r(30); + while (ros::ok()) + { + r.sleep(); + } + + std::string result = slam->save_occupancy_map_as_ppm(pkgpath + trajectoryFilename, true) == 0 ? "Saved trajectory to file." : "FAILED to save trajectory to file."; + std::cout << result << std::endl; + result = slam->save_relocalization_map(pkgpath + relocalizationFilename) == 0 ? "Saved relocalization map to file." : "FAILED to save relocalization map to file."; + std::cout << result << std::endl; + result = slam->save_occupancy_map(pkgpath + occupancyFilename) == 0 ? "Saved occupancy map to file." 
: "FAILED to save occupancy map to file."; + std::cout << result << std::endl; + + slam->flush_resources(); +}//end of callback + + +void SNodeletSlam::setCalibrationData(const sensor_msgs::CameraInfoConstPtr & cameraInfoMsg, rs::core::intrinsics & cameraInfo) +{ + NODELET_INFO("setCalibrationData "); + cameraInfo.width = cameraInfoMsg->width; + cameraInfo.height = cameraInfoMsg->height; + cameraInfo.fx = cameraInfoMsg->K[0]; + cameraInfo.fy = cameraInfoMsg->K[4]; + cameraInfo.ppx = cameraInfoMsg->K[2]; + cameraInfo.ppy = cameraInfoMsg->K[5]; + for (int i = 0; i < 5; i++) + cameraInfo.coeffs[i] = (float)cameraInfoMsg->D[i]; + std::string distortion_model = cameraInfoMsg->distortion_model; + if (distortion_model.compare("modified_brown_conrady") == 0) + { + cameraInfo.model = rs::core::distortion_type::modified_brown_conrady; + } + else if (distortion_model.compare("none") == 0) + { + cameraInfo.model = rs::core::distortion_type::none; + } + else if (distortion_model.compare("distortion_ftheta") == 0) + { + cameraInfo.model = rs::core::distortion_type::distortion_ftheta; + } + else + { + cameraInfo.model = rs::core::distortion_type::none; + } +} + + +void SNodeletSlam::setStreamConfigIntrin(rs::core::stream_type stream, std::map< rs::core::stream_type, rs::core::intrinsics > intrinsics) +{ + auto & supported_stream_config = supported_config[stream]; + if (!supported_stream_config.is_enabled || supported_stream_config.size.width != intrinsics[stream].width || supported_stream_config.size.height != intrinsics[stream].height) + { + ROS_ERROR("size of stream is not supported by slam"); + ROS_ERROR_STREAM(" supported: stream " << (uint32_t) stream << ", width: " << supported_stream_config.size.width << " height: " << supported_stream_config.size.height); + ROS_ERROR_STREAM(" received: stream " << (uint32_t) stream << ", width: " << intrinsics[stream].width << " height: " << intrinsics[stream].height); + + return; + } + 
rs::core::video_module_interface::actual_image_stream_config &actual_stream_config = actual_config[stream]; + actual_config[stream].size.width = intrinsics[stream].width; + actual_config[stream].size.height = intrinsics[stream].height; + actual_stream_config.frame_rate = supported_stream_config.frame_rate; + actual_stream_config.intrinsics = intrinsics[stream]; + actual_stream_config.is_enabled = true; +}//end of setStremConfigIntrin + + +void SNodeletSlam::setMotionData(realsense_ros_camera::IMUInfo & imu_res, rs::core::motion_device_intrinsics & motion_intrin) +{ + int index = 0; + for (int i = 0; i < 3; ++i) + { + for (int j = 0; j < 4; ++j) + { + motion_intrin.data[i][j] = imu_res.data[index]; + ++index; + } + motion_intrin.noise_variances[i] = imu_res.noise_variances[i]; + motion_intrin.bias_variances[i] = imu_res.bias_variances[i]; + } +}//end of setMotionData + + +void SNodeletSlam::setMotionConfigIntrin(rs::core::motion_type motion, std::map< rs::core::motion_type, rs::core::motion_device_intrinsics > motion_intrinsics) +{ + actual_config[motion].is_enabled = true; + actual_config[motion].intrinsics = motion_intrinsics[motion]; +}//end of setMotionConfigIntrin + + +void SNodeletSlam::setExtrinData(realsense_ros_camera::Extrinsics & fe_res, rs::core::extrinsics & extrinsics) +{ + for (int i = 0; i < 9; ++i) + { + extrinsics.rotation[i] = fe_res.rotation[i]; + if (i < 3) + { + extrinsics.translation[i] = fe_res.translation[i]; + } + } +}//end of setExtrinData +}//end namespace diff --git a/realsense_ros_slam/test/slam.test b/realsense_ros_slam/test/slam.test new file mode 100644 index 0000000..8d637df --- /dev/null +++ b/realsense_ros_slam/test/slam.test @@ -0,0 +1,13 @@ + + + + + + + + + + + diff --git a/realsense_ros_slam/test/slam_test.cpp b/realsense_ros_slam/test/slam_test.cpp new file mode 100644 index 0000000..c77dee4 --- /dev/null +++ b/realsense_ros_slam/test/slam_test.cpp @@ -0,0 +1,141 @@ +// License: Apache 2.0. See LICENSE file in root directory. 
+// Copyright(c) 2017 Intel Corporation. All Rights Reserved + +#include +#include // Added to satisfy roslint +#include // Added to satisfy roslint +#include +#include +#include +#include +#include + +bool camera_pose_received = false; +bool reloc_pose_received = false; +bool pose2d_received = false; +bool accuracy_received = false; +bool map_received = false; +uint32_t highest_accuracy = 0; + +geometry_msgs::PoseStamped last_cam_pose; +geometry_msgs::PoseStamped last_reloc_pose; +geometry_msgs::Pose2D last_pose2d; +nav_msgs::OccupancyGrid last_map; + +void testPose(geometry_msgs::PoseStamped pose_msg) +{ + EXPECT_GT(pose_msg.header.seq, 0); + EXPECT_GT(pose_msg.header.frame_id.length(), 0); + EXPECT_TRUE(pose_msg.header.stamp.isValid()); + + bool isPositionAllZeros = true; + if (pose_msg.pose.position.x || pose_msg.pose.position.y || pose_msg.pose.position.z) isPositionAllZeros = false; + EXPECT_FALSE(isPositionAllZeros); + + bool isOrientationAllZeros = true; + if (pose_msg.pose.orientation.x || pose_msg.pose.orientation.y || pose_msg.pose.orientation.z || pose_msg.pose.orientation.w) isOrientationAllZeros = false; + EXPECT_FALSE(isOrientationAllZeros); +} + +TEST(RealsenseTests, testCamPose) +{ + EXPECT_TRUE(camera_pose_received); + if (!camera_pose_received) return; + testPose(last_cam_pose); +} + +TEST(RealsenseTests, testRelocPose) +{ + EXPECT_TRUE(reloc_pose_received); + if (!reloc_pose_received) return; + testPose(last_reloc_pose); +} + +TEST(RealsenseTests, testPose2D) +{ + EXPECT_TRUE(pose2d_received); + if (!pose2d_received) return; + EXPECT_TRUE(last_pose2d.x || last_pose2d.y); + // we are not testing last_pose2d.theta because zero could be valid. 
+} + +TEST(RealsenseTests, testAccuracy) +{ + EXPECT_TRUE(accuracy_received); + if (!accuracy_received) return; + EXPECT_GE(highest_accuracy, 2); +} + +TEST(RealsenseTests, testMap) +{ + EXPECT_TRUE(map_received); + if (!map_received) return; + + EXPECT_GT(last_map.header.seq, 0); + EXPECT_TRUE(last_map.header.stamp.isValid()); + //EXPECT_GT(last_map.header.frame_id.length(), 0); // map header doesn't have frame_id + + EXPECT_GT(last_map.info.resolution, 0); + EXPECT_GT(last_map.info.width, 0); + EXPECT_GT(last_map.info.height, 0); + EXPECT_NE(last_map.info.origin.position.x, 0); + EXPECT_NE(last_map.info.origin.position.y, 0); + + EXPECT_GT(last_map.data.size(), 0); +} + +void camera_pose_callback(const geometry_msgs::PoseStampedConstPtr &ptr) +{ + camera_pose_received = true; + last_cam_pose = *ptr; +} + +void reloc_pose_callback(const geometry_msgs::PoseStampedConstPtr &ptr) +{ + reloc_pose_received = true; + last_reloc_pose = *ptr; +} + +void pose2d_callback(const geometry_msgs::Pose2DConstPtr &ptr) +{ + pose2d_received = true; + last_pose2d = *ptr; +} + +void accuracy_callback(const realsense_ros_slam::TrackingAccuracyConstPtr &ptr) +{ + accuracy_received = true; + if (ptr->tracking_accuracy > highest_accuracy) highest_accuracy = ptr->tracking_accuracy; +} + +void map_callback(const nav_msgs::OccupancyGridConstPtr &ptr) +{ + map_received = true; + last_map = *ptr; +} + +int main(int argc, char **argv) try +{ + testing::InitGoogleTest(&argc, argv); + + ros::init(argc, argv, "utest"); + ros::NodeHandle nh; + + ROS_INFO_STREAM("RealSense SLAM test - Initializing Tests..."); + + ros::Subscriber sub_cam_pose = nh.subscribe("camera_pose", 10, &camera_pose_callback); + ros::Subscriber sub_reloc_pose = nh.subscribe("reloc_pose", 10, &reloc_pose_callback); + ros::Subscriber sub_pose2d = nh.subscribe("pose2d", 10, &pose2d_callback); + ros::Subscriber sub_tracking_acc = nh.subscribe("tracking_accuracy", 10, &accuracy_callback); + ros::Subscriber sub_map = 
nh.subscribe("map", 10, &map_callback); + + ros::Duration duration; + duration.sec = 59; + duration.sleep(); + ros::spinOnce(); + + ROS_INFO_STREAM("RealSense SLAM test - Running Tests..."); + + return RUN_ALL_TESTS(); +} +catch (...) {} // catch the "testing::internal::::ClassUniqueToAlwaysTrue" from gtest diff --git a/realsense_ros_slam/trajectory_1019.ppm b/realsense_ros_slam/trajectory_1019.ppm new file mode 100644 index 0000000..dba7c95 Binary files /dev/null and b/realsense_ros_slam/trajectory_1019.ppm differ