diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index da8ede1..ed16c8f 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit da8ede1b91c718db59573ae5d9f153910c688920 +Subproject commit ed16c8ffa46c20aff35c43181067a03c7849df8c diff --git a/multisense_ros/include/multisense_ros/camera.h b/multisense_ros/include/multisense_ros/camera.h index 9df67dc..73691b0 100644 --- a/multisense_ros/include/multisense_ros/camera.h +++ b/multisense_ros/include/multisense_ros/camera.h @@ -77,6 +77,7 @@ class Camera { void colorizeCallback(const crl::multisense::image::Header& header); void groundSurfaceCallback(const crl::multisense::image::Header& header); void groundSurfaceSplineCallback(const crl::multisense::ground_surface::Header& header); + void dpuResultCallback(const crl::multisense::dpu_result::Header& header); void borderClipChanged(const BorderClip &borderClipType, double borderClipValue); @@ -96,6 +97,7 @@ class Camera { static constexpr char AUX[] = "aux"; static constexpr char CALIBRATION[] = "calibration"; static constexpr char GROUND_SURFACE[] = "ground_surface"; + static constexpr char DPU_RESULT[] = "dpu_result"; // // Frames @@ -140,6 +142,7 @@ class Camera { static constexpr char GROUND_SURFACE_IMAGE_TOPIC[] = "image"; static constexpr char GROUND_SURFACE_INFO_TOPIC[] = "camera_info"; static constexpr char GROUND_SURFACE_POINT_SPLINE_TOPIC[] = "spline"; + static constexpr char DPU_RESULT_TOPIC[] = "class_map"; // @@ -169,6 +172,7 @@ class Camera { ros::NodeHandle aux_nh_; ros::NodeHandle calibration_nh_; ros::NodeHandle ground_surface_nh_; + ros::NodeHandle dpu_result_nh_; // // Image transports @@ -189,6 +193,7 @@ class Camera { image_transport::ImageTransport aux_rect_transport_; image_transport::ImageTransport aux_rgb_rect_transport_; image_transport::ImageTransport ground_surface_transport_; + image_transport::ImageTransport dpu_result_transport_; // // Data publishers @@ -206,6 +211,7 @@ class 
Camera { image_transport::CameraPublisher aux_rect_cam_pub_; image_transport::CameraPublisher aux_rgb_rect_cam_pub_; image_transport::Publisher ground_surface_cam_pub_; + image_transport::Publisher dpu_result_pub_; ros::Publisher left_mono_cam_info_pub_; ros::Publisher right_mono_cam_info_pub_; @@ -276,6 +282,8 @@ class Camera { sensor_msgs::Image ground_surface_image_; + sensor_msgs::Image dpu_result_image_; + multisense_ros::RawCamData raw_cam_data_; std::vector pointcloud_color_buffer_; @@ -365,6 +373,11 @@ class Camera { bool can_support_ground_surface_ = false; + // + // Can support the deep learning processing unit (DPU) + + bool can_support_dpu_ = false; + // // Diagnostics diagnostic_updater::Updater diagnostic_updater_; diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index c6a8613..49cb946 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -114,6 +114,8 @@ void groundSurfaceCB(const image::Header& header, void* userDataP) { reinterpret_cast<Camera*>(userDataP)->groundSurfaceCallback(header); } void groundSurfaceSplineCB(const ground_surface::Header& header, void* userDataP) { reinterpret_cast<Camera*>(userDataP)->groundSurfaceSplineCallback(header); } +void dpuResultCB(const dpu_result::Header& header, void* userDataP) +{ reinterpret_cast<Camera*>(userDataP)->dpuResultCallback(header); } bool isValidReprojectedPoint(const Eigen::Vector3f& pt, float squared_max_range) @@ -239,6 +241,7 @@ constexpr char Camera::RIGHT[]; constexpr char Camera::AUX[]; constexpr char Camera::CALIBRATION[]; constexpr char Camera::GROUND_SURFACE[]; +constexpr char Camera::DPU_RESULT[]; constexpr char Camera::ORIGIN_FRAME[]; constexpr char Camera::HEAD_FRAME[]; @@ -277,6 +280,7 @@ constexpr char Camera::COST_CAMERA_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_IMAGE_TOPIC[]; constexpr char Camera::GROUND_SURFACE_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_POINT_SPLINE_TOPIC[]; +constexpr char Camera::DPU_RESULT_TOPIC[]; 
Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_(driver), @@ -286,6 +290,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : aux_nh_(device_nh_, AUX), calibration_nh_(device_nh_, CALIBRATION), ground_surface_nh_(device_nh_, GROUND_SURFACE), + dpu_result_nh_(device_nh_, DPU_RESULT), left_mono_transport_(left_nh_), right_mono_transport_(right_nh_), left_rect_transport_(left_nh_), @@ -302,6 +307,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : aux_rect_transport_(aux_nh_), aux_rgb_rect_transport_(aux_nh_), ground_surface_transport_(ground_surface_nh_), + dpu_result_transport_(dpu_result_nh_), stereo_calibration_manager_(nullptr), frame_id_origin_(tf_prefix + ORIGIN_FRAME), frame_id_head_(tf_prefix + HEAD_FRAME), @@ -408,6 +414,23 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : ground_surface_spline_pub_ = ground_surface_nh_.advertise<sensor_msgs::PointCloud2>(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); } + // + // Create DPU publisher if supported + + std::vector<system::DeviceMode> deviceModes; + status = driver_->getDeviceModes(deviceModes); + if (Status_Ok != status) { + std::cerr << "Failed to get device modes: " << Channel::statusString(status) << std::endl; + } + can_support_dpu_ = std::any_of(deviceModes.begin(), deviceModes.end(), [](const auto &mode) { + return mode.supportedDataSources & Source_DPU_Result;}); + + if (can_support_dpu_) { + dpu_result_pub_ = dpu_result_transport_.advertise(DPU_RESULT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_DPU_Result), + std::bind(&Camera::disconnectStream, this, Source_DPU_Result)); + } + if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) { @@ -824,6 +847,13 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(groundSurfaceSplineCB, this); } + // + // Add DPU result callbacks for supported cameras + + if (can_support_dpu_) { + driver_->addIsolatedCallback(dpuResultCB, this); + } + // // Disable color point 
cloud strict frame syncing, if desired @@ -874,6 +904,11 @@ Camera::~Camera() driver_->removeIsolatedCallback(groundSurfaceCB); driver_->removeIsolatedCallback(groundSurfaceSplineCB); } + + if (can_support_dpu_) + { + driver_->removeIsolatedCallback(dpuResultCB); + } } void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue) @@ -2252,6 +2287,47 @@ void Camera::groundSurfaceSplineCallback(const ground_surface::Header& header) ground_surface_spline_pub_.publish(ground_surface_utilities::eigenToPointcloud(eigen_pcl, frame_id_origin_)); } +void Camera::dpuResultCallback(const dpu_result::Header& header) +{ + ros::Time t = ros::Time::now(); + + int height = 160; + int width = 256; + int image_length = height * width; + + dpu_result_image_.data.resize(image_length); + memcpy(&dpu_result_image_.data[0], header.maskArray, image_length); + + // Multiply mask values by 100 so that the three classes are visually distinguishable + for (int i = 0; i < image_length ; i++) { + dpu_result_image_.data[i] *= 100; + } + + // Vertically flip the mask to match the images coming off of the camera + std::vector<uint8_t> orig_result_mask(image_length); + memcpy(orig_result_mask.data(), &dpu_result_image_.data[0], image_length); + for (int row = 0; row < height; ++row) + { + for (int col = 0; col < width; ++col) + { + dpu_result_image_.data[(height - row - 1) * width + col] = orig_result_mask[row * width + col]; + } + } + + dpu_result_image_.header.frame_id = LEFT_RECTIFIED_FRAME; + dpu_result_image_.header.seq = header.frameId; + dpu_result_image_.header.stamp = t; + dpu_result_image_.height = height; + dpu_result_image_.width = width; + + dpu_result_image_.encoding = sensor_msgs::image_encodings::MONO8; + dpu_result_image_.step = width; + + dpu_result_image_.is_bigendian = (htonl(1) == 1); + + dpu_result_pub_.publish(dpu_result_image_); +} + void Camera::updateConfig(const image::Config& config) { if (!stereo_calibration_manager_)