Skip to content

Commit

Permalink
SR-LR Update Iron (luxonis#482)
Browse files Browse the repository at this point in the history
  • Loading branch information
Serafadam authored Jan 24, 2024
1 parent 69c5b87 commit 1950ae6
Show file tree
Hide file tree
Showing 34 changed files with 536 additions and 186 deletions.
8 changes: 1 addition & 7 deletions depthai_bridge/src/ImuConverter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -135,13 +135,7 @@ void ImuConverter::toRosDaiMsg(std::shared_ptr<dai::IMUData> inData, std::deque<
tstamp = accel.getTimestampDevice();
else
tstamp = accel.getTimestamp();
if(_enable_rotation) {
auto rot = inData->packets[i].rotationVector;
auto magn = inData->packets[i].magneticField;
CreateUnitMessage(msg, tstamp, accel, gyro, rot, magn);
} else {
CreateUnitMessage(msg, tstamp, accel, gyro);
}
CreateUnitMessage(msg, tstamp, accel, gyro, rot, magn);
outImuMsgs.push_back(msg);
}
}
Expand Down
2 changes: 1 addition & 1 deletion depthai_bridge/src/TFPublisher.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
#include "rclcpp/rclcpp.hpp"
#include "tf2/LinearMath/Matrix3x3.h"
#include "tf2/LinearMath/Quaternion.h"
#include "tf2_geometry_msgs/tf2_geometry_msgs/tf2_geometry_msgs.hpp"
#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp"

namespace dai {
namespace ros {
Expand Down
66 changes: 45 additions & 21 deletions depthai_descriptions/urdf/include/depthai_macro.urdf.xacro
Original file line number Diff line number Diff line change
Expand Up @@ -12,29 +12,36 @@
<xacro:property name="has_imu" value="false" />
<xacro:property name="baseline" value="0.075" />

<xacro:if value="${model in ['OAK-D-SR']}">
<xacro:property name="baseline" value="0.02" />
</xacro:if>

<xacro:if value="${model in ['OAK-D', 'OAK-D-PRO', 'OAK-D-POE']}">
<xacro:property name="has_imu" value="true" />
</xacro:if>

<xacro:base camera_name="${camera_name}" parent="${parent}" camera_model="${camera_model}" base_frame="${base_frame}" cam_pos_x="${cam_pos_x}" cam_pos_y="${cam_pos_y}" cam_pos_z="${cam_pos_z}" cam_roll="${cam_roll}" cam_pitch="${cam_pitch}" cam_yaw="${cam_yaw}" has_imu="${has_imu}"/>

<!-- RGB Camera -->
<link name="${camera_name}_rgb_camera_frame" />

<joint name="${camera_name}_rgb_camera_joint" type="fixed">
<parent link="${base_frame}"/>
<child link="${camera_name}_rgb_camera_frame"/>
<origin xyz="0 0 0" rpy="0 0 0" />
</joint>

<link name="${camera_name}_rgb_camera_optical_frame"/>

<joint name="${camera_name}_rgb_camera_optical_joint" type="fixed">
<origin xyz="0 0 0" rpy="-${M_PI/2} 0.0 -${M_PI/2}"/>
<parent link="${camera_name}_rgb_camera_frame"/>
<child link="${camera_name}_rgb_camera_optical_frame"/>
</joint>

<xacro:unless value="${model in ['OAK-D-SR']}">
<link name="${camera_name}_rgb_camera_frame" />

<joint name="${camera_name}_rgb_camera_joint" type="fixed">
<parent link="${base_frame}"/>
<child link="${camera_name}_rgb_camera_frame"/>
<origin xyz="0 0 0" rpy="0 0 0" />
</joint>

<link name="${camera_name}_rgb_camera_optical_frame"/>

<joint name="${camera_name}_rgb_camera_optical_joint" type="fixed">
<origin xyz="0 0 0" rpy="-${M_PI/2} 0.0 -${M_PI/2}"/>
<parent link="${camera_name}_rgb_camera_frame"/>
<child link="${camera_name}_rgb_camera_optical_frame"/>
</joint>
</xacro:unless>
<xacro:unless value="${model in ['OAK-D-LR']}">

<!-- Left Camera -->
<link name="${camera_name}_left_camera_frame" />

Expand Down Expand Up @@ -70,16 +77,34 @@
<child link="${camera_name}_right_camera_optical_frame"/>
</joint>

</xacro:unless>

</xacro:macro>
<xacro:if value="${model in ['OAK-D-LR']}">

<!-- right Camera -->
<!-- left Camera -->
<link name="${camera_name}_left_camera_frame" />

<joint name="${camera_name}_left_camera_joint" type="fixed">
<parent link="${base_frame}"/>
<child link="${camera_name}_left_camera_frame"/>
<origin xyz="0 0.1 0" rpy="0 0 0" />
</joint>

<link name="${camera_name}_left_camera_optical_frame"/>

<joint name="${camera_name}_left_camera_optical_joint" type="fixed">
<origin xyz="0 0 0" rpy="-${M_PI/2} 0.0 -${M_PI/2}"/>
<parent link="${camera_name}_left_camera_frame"/>
<child link="${camera_name}_left_camera_optical_frame"/>
</joint>

<!-- right Camera -->
<link name="${camera_name}_right_camera_frame" />

<joint name="${camera_name}_right_camera_joint" type="fixed">
<parent link="${base_frame}"/>
<child link="${camera_name}_right_camera_frame"/>
<origin xyz="0 -${baseline/2} 0" rpy="0 0 0" />
<origin xyz="0 -0.05 0" rpy="0 0 0" />
</joint>

<link name="${camera_name}_right_camera_optical_frame"/>
Expand All @@ -89,8 +114,7 @@
<parent link="${camera_name}_right_camera_frame"/>
<child link="${camera_name}_right_camera_optical_frame"/>
</joint>


</xacro:if>
</xacro:macro>

</robot>
3 changes: 3 additions & 0 deletions depthai_examples/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,11 @@ dai_add_node_ros2(stereo_inertial_node src/stereo_inertial_publisher.cpp)
dai_add_node_ros2(feature_tracker src/feature_tracker_publisher.cpp)
dai_add_node_ros2(stereo_node src/stereo_publisher.cpp)
dai_add_node_ros2(yolov4_spatial_node src/yolov4_spatial_publisher.cpp)
dai_add_node_ros2(yolov4_node src/yolov4_publisher.cpp)

target_compile_definitions(mobilenet_node PRIVATE BLOB_NAME="${mobilenet_blob_name}")
target_compile_definitions(yolov4_spatial_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")
target_compile_definitions(yolov4_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")
target_compile_definitions(stereo_inertial_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")

if($ENV{ROS_DISTRO} STREQUAL "galactic")
Expand All @@ -124,6 +126,7 @@ install(TARGETS
stereo_inertial_node
stereo_node
yolov4_spatial_node
yolov4_node
feature_tracker
DESTINATION lib/${PROJECT_NAME})

Expand Down
54 changes: 40 additions & 14 deletions depthai_examples/launch/yolov4_publisher.launch.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from launch import LaunchDescription, launch_description_sources
from launch.actions import IncludeLaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.conditions import IfCondition,UnlessCondition
from launch.substitutions import LaunchConfiguration
import launch_ros.actions
import launch_ros.descriptions
Expand Down Expand Up @@ -131,7 +132,14 @@ def generate_launch_description():
'monoResolution',
default_value=monoResolution,
description='Contains the resolution of the Mono Cameras. Available resolutions are 800p, 720p & 400p for OAK-D & 480p for OAK-D-Lite.')


# declare_spatial_camera_cmd = DeclareLaunchArgument(
# 'spatial_camera',
# default_value="true",
# description='Enable spatial camera'
# )


urdf_launch = IncludeLaunchDescription(
launch_description_sources.PythonLaunchDescriptionSource(
os.path.join(urdf_launch_dir, 'urdf_launch.py')),
Expand All @@ -146,15 +154,32 @@ def generate_launch_description():
'cam_pitch' : cam_pitch,
'cam_yaw' : cam_yaw}.items())
yolov4_spatial_node = launch_ros.actions.Node(
package='depthai_examples', executable='yolov4_spatial_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
{'monoResolution': monoResolution},
{'spatial_camera': spatial_camera}])
package='depthai_examples', executable='yolov4_spatial_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
{'monoResolution': monoResolution},
{'subpixel': subpixel},
{'lrCheckTresh': lrCheckTresh},
{'confidence': confidence},
],
condition=IfCondition(LaunchConfiguration('spatial_camera'))
)

yolov4_node = launch_ros.actions.Node(
package='depthai_examples', executable='yolov4_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
],
condition=UnlessCondition(LaunchConfiguration('spatial_camera'))
)

rviz_node = launch_ros.actions.Node(
package='rviz2', executable='rviz2', output='screen',
Expand All @@ -181,11 +206,12 @@ def generate_launch_description():
ld.add_action(declare_sync_nn_cmd)
ld.add_action(urdf_launch)

if spatial_camera == True:
ld.add_action(declare_subpixel_cmd)
ld.add_action(declare_lrCheckTresh_cmd)
ld.add_action(declare_monoResolution_cmd)
ld.add_action(declare_subpixel_cmd)
ld.add_action(declare_lrCheckTresh_cmd)
ld.add_action(declare_monoResolution_cmd)

ld.add_action(yolov4_spatial_node)
ld.add_action(yolov4_node)

return ld

141 changes: 141 additions & 0 deletions depthai_examples/src/yolov4_publisher.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@

#include <cstdio>
#include <iostream>

#include "camera_info_manager/camera_info_manager.hpp"
#include "depthai_bridge/BridgePublisher.hpp"
#include "depthai_bridge/ImageConverter.hpp"
#include "depthai_bridge/ImgDetectionConverter.hpp"
#include "rclcpp/executors.hpp"
#include "rclcpp/node.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "vision_msgs/msg/detection2_d_array.hpp"

// Includes common headers necessary for development using the depthai library
#include "depthai/device/DataQueue.hpp"
#include "depthai/device/Device.hpp"
#include "depthai/pipeline/Pipeline.hpp"
#include "depthai/pipeline/node/ColorCamera.hpp"
#include "depthai/pipeline/node/DetectionNetwork.hpp"
#include "depthai/pipeline/node/XLinkOut.hpp"

// COCO class labels (80 entries); index order must match the class ids
// emitted by the YOLOv4-tiny blob.
const std::vector<std::string> label_map = {
    "person",       "bicycle",    "car",           "motorbike",      "aeroplane",  "bus",        "train",
    "truck",        "boat",       "traffic light", "fire hydrant",   "stop sign",  "parking meter",
    "bench",        "bird",       "cat",           "dog",            "horse",      "sheep",      "cow",
    "elephant",     "bear",       "zebra",         "giraffe",        "backpack",   "umbrella",   "handbag",
    "tie",          "suitcase",   "frisbee",       "skis",           "snowboard",  "sports ball",
    "kite",         "baseball bat", "baseball glove", "skateboard",  "surfboard",  "tennis racket",
    "bottle",       "wine glass", "cup",           "fork",           "knife",      "spoon",      "bowl",
    "banana",       "apple",      "sandwich",      "orange",         "broccoli",   "carrot",     "hot dog",
    "pizza",        "donut",      "cake",          "chair",          "sofa",       "pottedplant",
    "bed",          "diningtable", "toilet",       "tvmonitor",      "laptop",     "mouse",      "remote",
    "keyboard",     "cell phone", "microwave",     "oven",           "toaster",    "sink",
    "refrigerator", "book",       "clock",         "vase",           "scissors",   "teddy bear",
    "hair drier",   "toothbrush"};

/// Builds the DepthAI pipeline: ColorCamera preview -> YOLOv4-tiny detection
/// network, with XLink outputs for preview frames ("preview") and network
/// detections ("detections").
///
/// @param syncNN when true, the published preview stream is the network
///        passthrough, so frames stay in sync with their detections.
/// @param nnPath absolute path to the network .blob file.
/// @return the assembled pipeline, ready to be passed to dai::Device.
dai::Pipeline createPipeline(bool syncNN, const std::string& nnPath) {
    dai::Pipeline pipeline;
    auto colorCam = pipeline.create<dai::node::ColorCamera>();
    auto detectionNetwork = pipeline.create<dai::node::YoloDetectionNetwork>();

    // XLink connections carrying data from the device back to the host.
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto xoutNN = pipeline.create<dai::node::XLinkOut>();

    xoutRgb->setStreamName("preview");
    xoutNN->setStreamName("detections");

    // Camera properties: 416x416 preview matches the network input size.
    colorCam->setPreviewSize(416, 416);
    colorCam->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    colorCam->setInterleaved(false);
    colorCam->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
    colorCam->setFps(40);

    // Network-specific settings for YOLOv4-tiny (80 COCO classes).
    detectionNetwork->setConfidenceThreshold(0.5f);
    detectionNetwork->setNumClasses(80);
    detectionNetwork->setCoordinateSize(4);
    detectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
    detectionNetwork->setAnchorMasks({{"side26", {1, 2, 3}}, {"side13", {3, 4, 5}}});
    detectionNetwork->setIouThreshold(0.5f);
    detectionNetwork->setBlobPath(nnPath);
    detectionNetwork->setNumInferenceThreads(2);
    detectionNetwork->input.setBlocking(false);

    // Linking: camera feeds the network; preview output is either the
    // network passthrough (synced) or the raw camera preview.
    colorCam->preview.link(detectionNetwork->input);
    if(syncNN)
        detectionNetwork->passthrough.link(xoutRgb->input);
    else
        colorCam->preview.link(xoutRgb->input);

    detectionNetwork->out.link(xoutNN->input);
    return pipeline;
}

/// Entry point: reads ROS parameters, builds the YOLOv4 pipeline, and
/// bridges the device's preview and detection queues to ROS topics
/// ("color/image" and "color/yolov4_detections").
int main(int argc, char** argv) {
    rclcpp::init(argc, argv);
    auto node = rclcpp::Node::make_shared("yolov4_node");

    std::string tfPrefix, resourceBaseFolder, nnPath;
    std::string camera_param_uri;
    std::string nnName(BLOB_NAME);  // compiled-in default blob name (set via CMake)
    bool syncNN = true;

    node->declare_parameter("tf_prefix", "oak");
    node->declare_parameter("camera_param_uri", camera_param_uri);
    node->declare_parameter("sync_nn", true);
    node->declare_parameter("nnName", "");
    node->declare_parameter("resourceBaseFolder", "");

    node->get_parameter("tf_prefix", tfPrefix);
    node->get_parameter("camera_param_uri", camera_param_uri);
    node->get_parameter("sync_nn", syncNN);
    node->get_parameter("resourceBaseFolder", resourceBaseFolder);

    if(resourceBaseFolder.empty()) {
        throw std::runtime_error("Send the path to the resource folder containing NNBlob in \'resourceBaseFolder\' ");
    }

    // Only override the compiled-in default when the user actually supplied a
    // model name. The parameter's declared default is "", so the previous
    // `nnParam != "x"` guard always clobbered nnName with an empty string,
    // yielding a broken blob path. "x" is kept as a legacy "use default"
    // sentinel for launch files that pass it explicitly.
    std::string nnParam;
    node->get_parameter("nnName", nnParam);
    if(!nnParam.empty() && nnParam != "x") {
        nnName = nnParam;
    }

    nnPath = resourceBaseFolder + "/" + nnName;
    dai::Pipeline pipeline = createPipeline(syncNN, nnPath);
    dai::Device device(pipeline);

    auto colorQueue = device.getOutputQueue("preview", 30, false);
    auto detectionQueue = device.getOutputQueue("detections", 30, false);
    auto calibrationHandler = device.readCalibration();

    dai::rosBridge::ImageConverter rgbConverter(tfPrefix + "_rgb_camera_optical_frame", false);
    auto rgbCameraInfo = rgbConverter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::CAM_A, -1, -1);
    dai::rosBridge::BridgePublisher<sensor_msgs::msg::Image, dai::ImgFrame> rgbPublish(colorQueue,
                                                                                       node,
                                                                                       std::string("color/image"),
                                                                                       std::bind(&dai::rosBridge::ImageConverter::toRosMsg,
                                                                                                 &rgbConverter,  // since the converter has the same frame name
                                                                                                                 // and image type is also same we can reuse it
                                                                                                 std::placeholders::_1,
                                                                                                 std::placeholders::_2),
                                                                                       30,
                                                                                       rgbCameraInfo,
                                                                                       "color");

    // Detections are published in the RGB optical frame; 416x416 matches the
    // network input resolution used in createPipeline.
    dai::rosBridge::ImgDetectionConverter detConverter(tfPrefix + "_rgb_camera_optical_frame", 416, 416, false);
    dai::rosBridge::BridgePublisher<vision_msgs::msg::Detection2DArray, dai::ImgDetections> detectionPublish(
        detectionQueue,
        node,
        std::string("color/yolov4_detections"),
        std::bind(&dai::rosBridge::ImgDetectionConverter::toRosMsg, &detConverter, std::placeholders::_1, std::placeholders::_2),
        30);

    detectionPublish.addPublisherCallback();
    rgbPublish.addPublisherCallback();  // addPublisherCallback works only when the dataqueue is non blocking.

    rclcpp::spin(node);

    return 0;
}
Loading

0 comments on commit 1950ae6

Please sign in to comment.