From 02c52f11001668c696a9a86f61567aa2ff9e9fb7 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 20 Oct 2024 15:56:01 -0400 Subject: [PATCH 01/35] Mostly copy-paste basic setup stuff for the node that projects the detected point clouds from the image to the point cloud. --- all_seaing_perception/CMakeLists.txt | 7 +- .../bbox_project_pcloud.cpp | 96 +++++++++++++++++++ .../bbox_project_pcloud.hpp | 49 ++++++++++ 3 files changed, 151 insertions(+), 1 deletion(-) create mode 100644 all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp create mode 100644 all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp diff --git a/all_seaing_perception/CMakeLists.txt b/all_seaing_perception/CMakeLists.txt index 149fa5c5..a4ed24f9 100644 --- a/all_seaing_perception/CMakeLists.txt +++ b/all_seaing_perception/CMakeLists.txt @@ -31,6 +31,10 @@ ament_auto_add_executable( obstacle_bbox_visualizer ${PROJECT_NAME}/obstacle_bbox_visualizer.cpp ) +ament_auto_add_executable( + bbox_project_pcloud + ${PROJECT_NAME}/bbox_project_pcloud.cpp +) target_link_libraries(obstacle_bbox_visualizer yaml-cpp) @@ -39,7 +43,8 @@ install(TARGETS obstacle_bbox_overlay point_cloud_filter point_cloud_image_overlay - obstacle_bbox_visualizer + obstacle_bbox_visualizer + bbox_project_pcloud DESTINATION lib/${PROJECT_NAME} ) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp new file mode 100644 index 00000000..af346844 --- /dev/null +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -0,0 +1,96 @@ +#include "all_seaing_perception/bbox_project_pcloud.hpp" + +#include "pcl/point_cloud.h" +#include "pcl/point_types.h" +#include "pcl_conversions/pcl_conversions.h" + +#include "cv_bridge/cv_bridge.h" + +#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp" +#include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" + +BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ + // Initialize tf_listener pointer + m_tf_buffer = std::make_unique(this->get_clock()); + m_tf_listener = std::make_shared(*m_tf_buffer); + + // Subscriptions + m_image_intrinsics_sub = this->create_subscription( + "camera_info", 10, std::bind(&PclImageOverlay::intrinsics_cb, this, std::placeholders::_1)); + m_image_sub.subscribe(this, "image", rmw_qos_profile_sensor_data); + m_cloud_sub.subscribe(this, "point_cloud", rmw_qos_profile_sensor_data); + + // Send pc msg and img msg to bb_pcl_project + m_pc_cam_sync = + std::make_shared(PointCloudCamPolicy(10), m_image_sub, m_cloud_sub); + m_pc_cam_sync->registerCallback(std::bind(&PclImageOverlay::bb_pcl_project, this, + std::placeholders::_1, std::placeholders::_2)); +} + +void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { + m_cam_model.fromCameraInfo(info_msg); +} + +void BBoxProjectPCloud::bb_pcl_project( + const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, + const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg) { + + // Get transform the first iteration + if (!m_pc_cam_tf_ok) + m_pc_cam_tf = get_tf(in_cloud_msg->header.frame_id, in_img_msg->header.frame_id); + + // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. 
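// [Illustrative aside, not part of this commit] toCvCopy() duplicates the image buffer so the
// cv::Mat can be modified, as done here; when read-only access is enough, cv_bridge::toCvShare()
// can avoid the copy (provided the requested encoding matches the message encoding), e.g.:
//   cv_bridge::CvImageConstPtr ro_ptr =
//       cv_bridge::toCvShare(in_img_msg, sensor_msgs::image_encodings::BGR8);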
+ cv_bridge::CvImagePtr cv_ptr; + try { + cv_ptr = cv_bridge::toCvCopy(in_img_msg, sensor_msgs::image_encodings::BGR8); + } catch (cv_bridge::Exception &e) { + RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); + return; + } + + // Transform in_cloud_msg and convert PointCloud2 to PCL PointCloud + sensor_msgs::msg::PointCloud2 in_cloud_tf; + tf2::doTransform(*in_cloud_msg, in_cloud_tf, m_pc_cam_tf); + pcl::PointCloud::Ptr in_cloud_tf_ptr(new pcl::PointCloud); + pcl::fromROSMsg(in_cloud_tf, *in_cloud_tf_ptr); + + for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { + + // Project 3D point onto the image plane using the intrinsic matrix. + // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. + cv::Point2d xy_rect = + m_cam_model.project3dToPixel(cv::Point3d(point_tf.y, point_tf.z, -point_tf.x)); + + // Plot projected point onto image if within bounds and in front of the boat + if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && + (xy_rect.y < m_cam_model.cameraInfo().height) && (point_tf.x >= 0)) { + cv::circle(cv_ptr->image, cv::Point(xy_rect.x, xy_rect.y), 2, cv::Scalar(255, 0, 0), 4); + } + } + m_image_pub->publish(*cv_ptr->toImageMsg()); +} + +geometry_msgs::msg::TransformStamped PclImageOverlay::get_tf(const std::string &in_target_frame, + const std::string &in_src_frame) { + geometry_msgs::msg::TransformStamped tf; + m_pc_cam_tf_ok = false; + try { + tf = m_tf_buffer->lookupTransform(in_target_frame, in_src_frame, tf2::TimePointZero); + m_pc_cam_tf_ok = true; + RCLCPP_INFO(this->get_logger(), "LiDAR to Camera Transform good"); + RCLCPP_INFO(this->get_logger(), "in_target_frame: %s, in_src_frame: %s", + in_target_frame.c_str(), in_src_frame.c_str()); + } catch (tf2::TransformException &ex) { + RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); + } + return tf; +} + +BBoxProjectPCloud::~BBoxProjectPCloud() {} + +int main(int argc, char *argv[]) { + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared()); + rclcpp::shutdown(); + return 0; +} \ No newline at end of file diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp new file mode 100644 index 00000000..0b6fe838 --- /dev/null +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -0,0 +1,49 @@ +#ifndef ALL_SEAING_PERCEPTION__BBOX_PROJECT_PCLOUD_HPP +#define ALL_SEAING_PERCEPTION__BBOX_PROJECT_PCLOUD_HPP + +#include + +#include "rclcpp/rclcpp.hpp" + +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" + +#include "image_geometry/pinhole_camera_model.h" +#include "message_filters/subscriber.h" +#include "message_filters/sync_policies/approximate_time.h" +#include "message_filters/synchronizer.h" + +#include "geometry_msgs/msg/transform_stamped.hpp" +#include "sensor_msgs/msg/camera_info.hpp" +#include "sensor_msgs/msg/image.hpp" +#include "sensor_msgs/msg/point_cloud2.hpp" + +class BBoxProjectPointCloud : public rclcpp::Node{ +private: + // Publishers and subscribers + rclcpp::Publisher::SharedPtr m_image_pub; + rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; + message_filters::Subscriber m_image_sub; + message_filters::Subscriber m_cloud_sub; + + // Transform variables + std::shared_ptr m_tf_listener{nullptr}; + std::unique_ptr m_tf_buffer; + geometry_msgs::msg::TransformStamped m_pc_cam_tf; + bool m_pc_cam_tf_ok; + + // Intrinsics callback camera model variables 
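// [Illustrative aside, not part of this commit] PinholeCameraModel caches the intrinsics from
// sensor_msgs/CameraInfo; project3dToPixel() then applies the standard pinhole projection to a
// point (X, Y, Z) expressed in the camera optical frame (Z forward):
//   u ~= fx * X / Z + cx,   v ~= fy * Y / Z + cy
//   image_geometry::PinholeCameraModel model;
//   model.fromCameraInfo(info_msg);
//   cv::Point2d uv = model.project3dToPixel(cv::Point3d(X, Y, Z));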
+ image_geometry::PinholeCameraModel m_cam_model; + + // Pointcloud-camera sync policies + typedef message_filters::sync_policies::ApproximateTime + PointCloudCamPolicy; + typedef message_filters::Synchronizer PointCloudCamSync; + std::shared_ptr m_pc_cam_sync; +public: + BBoxProjectPointCloud(); + virtual ~BBoxProjectPointCloud(); +}; + +#endif // ALL_SEAING_PERCEPTION__BBOX_PROJECT_PCLOUD_HPP \ No newline at end of file From 74c657f8f8b9a46088abf4cac49a29fcc8cc6746 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 20 Oct 2024 17:29:49 -0400 Subject: [PATCH 02/35] Added some bbox specific stuff (still mostly setup, will now get to the actual logic) --- .../bbox_project_pcloud.cpp | 40 +++++-------------- .../bbox_project_pcloud.hpp | 19 +++++++-- 2 files changed, 27 insertions(+), 32 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index af346844..86f18e2f 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -1,14 +1,5 @@ #include "all_seaing_perception/bbox_project_pcloud.hpp" -#include "pcl/point_cloud.h" -#include "pcl/point_types.h" -#include "pcl_conversions/pcl_conversions.h" - -#include "cv_bridge/cv_bridge.h" - -#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp" -#include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" - BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Initialize tf_listener pointer m_tf_buffer = std::make_unique(this->get_clock()); @@ -19,12 +10,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ "camera_info", 10, std::bind(&PclImageOverlay::intrinsics_cb, this, std::placeholders::_1)); m_image_sub.subscribe(this, "image", rmw_qos_profile_sensor_data); m_cloud_sub.subscribe(this, "point_cloud", rmw_qos_profile_sensor_data); + m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data) // Send pc msg and img msg to bb_pcl_project - m_pc_cam_sync = - std::make_shared(PointCloudCamPolicy(10), m_image_sub, m_cloud_sub); - m_pc_cam_sync->registerCallback(std::bind(&PclImageOverlay::bb_pcl_project, this, - std::placeholders::_1, std::placeholders::_2)); + m_pc_cam_bbox_sync = + std::make_shared(PointCloudCamPolicy(10), m_image_sub, m_cloud_sub, m_bbox_sub); + m_pc_cam_bbox_sync->registerCallback(std::bind(&PclImageOverlay::bb_pcl_project, this, + std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -33,11 +25,12 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, - const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg) { - - // Get transform the first iteration + const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, + const all_seaing_interfaces::msg::LabeledBoundingBox2DArray &in_bbox_msg) { + + // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) - m_pc_cam_tf = get_tf(in_cloud_msg->header.frame_id, in_img_msg->header.frame_id); + m_pc_cam_tf = get_tf(in_img_msg->header.frame_id, in_cloud_msg->header.frame_id); // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. 
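// [Illustrative aside, not part of this commit] the three-topic synchronizer set up in the
// constructor above, written out with consistent (hypothetical) names; the policy must list all
// three message types and the callback now needs three placeholders:
//   using SyncPolicy = message_filters::sync_policies::ApproximateTime<
//       sensor_msgs::msg::Image, sensor_msgs::msg::PointCloud2,
//       all_seaing_interfaces::msg::LabeledBoundingBox2DArray>;
//   auto sync = std::make_shared<message_filters::Synchronizer<SyncPolicy>>(
//       SyncPolicy(10), m_image_sub, m_cloud_sub, m_bbox_sub);
//   sync->registerCallback(std::bind(&BBoxProjectPCloud::bb_pcl_project, this,
//       std::placeholders::_1, std::placeholders::_2, std::placeholders::_3));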
cv_bridge::CvImagePtr cv_ptr; @@ -54,19 +47,8 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::PointCloud::Ptr in_cloud_tf_ptr(new pcl::PointCloud); pcl::fromROSMsg(in_cloud_tf, *in_cloud_tf_ptr); - for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { - - // Project 3D point onto the image plane using the intrinsic matrix. - // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. - cv::Point2d xy_rect = - m_cam_model.project3dToPixel(cv::Point3d(point_tf.y, point_tf.z, -point_tf.x)); + //TODO: Take each bounding box and filter the points of the point cloud that are "in" this bbox in 3D, using rays or smth - // Plot projected point onto image if within bounds and in front of the boat - if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && - (xy_rect.y < m_cam_model.cameraInfo().height) && (point_tf.x >= 0)) { - cv::circle(cv_ptr->image, cv::Point(xy_rect.x, xy_rect.y), 2, cv::Scalar(255, 0, 0), 4); - } - } m_image_pub->publish(*cv_ptr->toImageMsg()); } diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 0b6fe838..78244b51 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -18,13 +18,26 @@ #include "sensor_msgs/msg/image.hpp" #include "sensor_msgs/msg/point_cloud2.hpp" +#include "pcl/point_cloud.h" +#include "pcl/point_types.h" +#include "pcl_conversions/pcl_conversions.h" + +#include "cv_bridge/cv_bridge.h" + +#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp" +#include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" + +#include "all_seaing_interfaces/msg/labeled_bounding_box2_d_array.hpp" + class BBoxProjectPointCloud : public rclcpp::Node{ private: // Publishers and subscribers rclcpp::Publisher::SharedPtr m_image_pub; rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; + rclcpp::Subscription<> message_filters::Subscriber m_image_sub; message_filters::Subscriber m_cloud_sub; + message_filters::Subscriber m_bbox_sub; // Transform variables std::shared_ptr m_tf_listener{nullptr}; @@ -38,9 +51,9 @@ class BBoxProjectPointCloud : public rclcpp::Node{ // Pointcloud-camera sync policies typedef message_filters::sync_policies::ApproximateTime - PointCloudCamPolicy; - typedef message_filters::Synchronizer PointCloudCamSync; - std::shared_ptr m_pc_cam_sync; + PointCloudCamBBoxPolicy; + typedef message_filters::Synchronizer PointCloudCamBBoxSync; + std::shared_ptr m_pc_cam_bbox_sync; public: BBoxProjectPointCloud(); virtual ~BBoxProjectPointCloud(); From 4c353e2c2f3758a75849db28ccddd15d72f7ca2a Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 22 Oct 2024 23:43:19 -0400 Subject: [PATCH 03/35] Added the code that selects and publishes the point cloud points inside each detected object's bounding box (when projected onto the image), with the respective labels and probabilities, colcon build succesful, didn't test yet --- .../roboboat/waypoint_finder.py | 3 - all_seaing_interfaces/CMakeLists.txt | 3 + .../msg/LabeledObjectPointCloud.msg | 3 + .../msg/LabeledObjectPointCloudArray.msg | 1 + .../bbox_project_pcloud.cpp | 56 +++++++++++++------ .../bbox_project_pcloud.hpp | 35 +++++++++--- 6 files changed, 72 insertions(+), 29 deletions(-) create mode 100644 all_seaing_interfaces/msg/LabeledObjectPointCloud.msg create mode 100644 
all_seaing_interfaces/msg/LabeledObjectPointCloudArray.msg diff --git a/all_seaing_autonomy/all_seaing_autonomy/roboboat/waypoint_finder.py b/all_seaing_autonomy/all_seaing_autonomy/roboboat/waypoint_finder.py index be374689..8ecd824f 100755 --- a/all_seaing_autonomy/all_seaing_autonomy/roboboat/waypoint_finder.py +++ b/all_seaing_autonomy/all_seaing_autonomy/roboboat/waypoint_finder.py @@ -13,9 +13,6 @@ import os from ament_index_python.packages import get_package_share_directory -# TODO: DOCUMENT THE CODE IN THE AUTONOMY WIKI (SEE PERCEPTION WIKI FOR REFERENCE) - - class WaypointFinder(Node): def __init__(self): super().__init__("waypoint_finder") diff --git a/all_seaing_interfaces/CMakeLists.txt b/all_seaing_interfaces/CMakeLists.txt index b327a5a0..2e7d8f55 100644 --- a/all_seaing_interfaces/CMakeLists.txt +++ b/all_seaing_interfaces/CMakeLists.txt @@ -23,8 +23,11 @@ rosidl_generate_interfaces(${PROJECT_NAME} "msg/WaypointArray.msg" "msg/ObstacleCounter.msg" "msg/ObstacleCounterArray.msg" + "msg/LabeledObjectPointCloud.msg" + "msg/LabeledObjectPointCloudArray.msg" DEPENDENCIES geometry_msgs + sensor_msgs ) ament_auto_package() diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg new file mode 100644 index 00000000..8895d640 --- /dev/null +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -0,0 +1,3 @@ +int8 label +float64 probability +sensor_msgs/PointCloud2 cloud \ No newline at end of file diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloudArray.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloudArray.msg new file mode 100644 index 00000000..9d847f39 --- /dev/null +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloudArray.msg @@ -0,0 +1 @@ +all_seaing_interfaces/LabeledObjectPointCloud[] objects \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 86f18e2f..585fe703 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -5,18 +5,23 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_tf_buffer = std::make_unique(this->get_clock()); m_tf_listener = std::make_shared(*m_tf_buffer); + this->declare_parameter("bbox_object_margin", 0.0); + // Subscriptions m_image_intrinsics_sub = this->create_subscription( - "camera_info", 10, std::bind(&PclImageOverlay::intrinsics_cb, this, std::placeholders::_1)); + "camera_info", 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); m_image_sub.subscribe(this, "image", rmw_qos_profile_sensor_data); m_cloud_sub.subscribe(this, "point_cloud", rmw_qos_profile_sensor_data); - m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data) + m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data); // Send pc msg and img msg to bb_pcl_project m_pc_cam_bbox_sync = - std::make_shared(PointCloudCamPolicy(10), m_image_sub, m_cloud_sub, m_bbox_sub); - m_pc_cam_bbox_sync->registerCallback(std::bind(&PclImageOverlay::bb_pcl_project, this, + std::make_shared(PointCloudCamBBoxPolicy(10), m_image_sub, m_cloud_sub, m_bbox_sub); + m_pc_cam_bbox_sync->registerCallback(std::bind(&BBoxProjectPCloud::bb_pcl_project, this, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); + + // Publishers + m_object_pcl_pub = 
this->create_publisher("labeled_object_point_clouds", 5); } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -26,33 +31,48 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, - const all_seaing_interfaces::msg::LabeledBoundingBox2DArray &in_bbox_msg) { + const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg) { // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) + // TODO: get the camera frame another way and don't subscribe to the image topic, we only need the bounding boxes which are another topic m_pc_cam_tf = get_tf(in_img_msg->header.frame_id, in_cloud_msg->header.frame_id); - // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. - cv_bridge::CvImagePtr cv_ptr; - try { - cv_ptr = cv_bridge::toCvCopy(in_img_msg, sensor_msgs::image_encodings::BGR8); - } catch (cv_bridge::Exception &e) { - RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); - return; - } - - // Transform in_cloud_msg and convert PointCloud2 to PCL PointCloud + // Transform in_cloud_msg to the camera frame and convert PointCloud2 to PCL PointCloud sensor_msgs::msg::PointCloud2 in_cloud_tf; tf2::doTransform(*in_cloud_msg, in_cloud_tf, m_pc_cam_tf); pcl::PointCloud::Ptr in_cloud_tf_ptr(new pcl::PointCloud); pcl::fromROSMsg(in_cloud_tf, *in_cloud_tf_ptr); - //TODO: Take each bounding box and filter the points of the point cloud that are "in" this bbox in 3D, using rays or smth + auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); - m_image_pub->publish(*cv_ptr->toImageMsg()); + // Just use the same pcloud to image projection, but check if it's within some binding box and assign it to that detection + for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ + auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); + pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); + labeled_pcl.label = bbox.label; + labeled_pcl.probability = bbox.probability; + for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { + // Project 3D point onto the image plane using the intrinsic matrix. + // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. 
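// [Illustrative aside, not part of this commit] the containment test applied a few lines below,
// factored into a hypothetical helper: a projected point belongs to a detection when it falls
// inside the margin-expanded 2D bounding box of that detection.
//   static bool in_bbox(const cv::Point2d &px,
//                       const all_seaing_interfaces::msg::LabeledBoundingBox2D &box,
//                       double margin) {
//       return px.x >= box.min_x - margin && px.x <= box.max_x + margin &&
//              px.y >= box.min_y - margin && px.y <= box.max_y + margin;
//   }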
+ cv::Point2d xy_rect = m_cam_model.project3dToPixel(cv::Point3d(point_tf.y, point_tf.z, -point_tf.x)); + // Check if within bounds & in front of the boat + if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && + (xy_rect.y < m_cam_model.cameraInfo().height) && (point_tf.x >= 0)) { + double bbox_margin = this->get_parameter("bbox_object_margin").as_double(); + // Check if point is in bbox + if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ + obj_cloud_ptr->push_back(point_tf); + } + } + } + pcl::toROSMsg(*obj_cloud_ptr, labeled_pcl.cloud); + object_pcls.objects.push_back(labeled_pcl); + } + m_object_pcl_pub->publish(object_pcls); } -geometry_msgs::msg::TransformStamped PclImageOverlay::get_tf(const std::string &in_target_frame, +geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, const std::string &in_src_frame) { geometry_msgs::msg::TransformStamped tf; m_pc_cam_tf_ok = false; diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 78244b51..ba4a98f3 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -28,13 +28,15 @@ #include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" #include "all_seaing_interfaces/msg/labeled_bounding_box2_d_array.hpp" +#include "all_seaing_interfaces/msg/labeled_bounding_box2_d.hpp" +#include "all_seaing_interfaces/msg/labeled_object_point_cloud_array.hpp" +#include "all_seaing_interfaces/msg/labeled_object_point_cloud.hpp" -class BBoxProjectPointCloud : public rclcpp::Node{ +class BBoxProjectPCloud : public rclcpp::Node{ private: // Publishers and subscribers - rclcpp::Publisher::SharedPtr m_image_pub; + rclcpp::Publisher::SharedPtr m_object_pcl_pub; rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; - rclcpp::Subscription<> message_filters::Subscriber m_image_sub; message_filters::Subscriber m_cloud_sub; message_filters::Subscriber m_bbox_sub; @@ -50,13 +52,30 @@ class BBoxProjectPointCloud : public rclcpp::Node{ // Pointcloud-camera sync policies typedef message_filters::sync_policies::ApproximateTime + sensor_msgs::msg::PointCloud2, + all_seaing_interfaces::msg::LabeledBoundingBox2DArray> PointCloudCamBBoxPolicy; - typedef message_filters::Synchronizer PointCloudCamBBoxSync; - std::shared_ptr m_pc_cam_bbox_sync; + typedef message_filters::Synchronizer PointCloudCamBBoxSync; + std::shared_ptr m_pc_cam_bbox_sync; + + // Get intrinsic camera model information needed for projection + void intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg); + + // Get transform from source frame to target frame + geometry_msgs::msg::TransformStamped get_tf(const std::string &in_target_frame, + const std::string &in_src_frame); + + // Get the point cloud from the LiDAR, the image from the camera (only to get the camera frame), + // and the bounding boxes of the objects detected from the YOLOv8 (or other object detector/color segmentator) node, + // and output a set of point clouds representing the points included in the bounding box of each detected object + // (in 3D space, by projecting them onto the image and seeing if they are in the bounding box) + void bb_pcl_project( + const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, + const 
sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, + const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg); public: - BBoxProjectPointCloud(); - virtual ~BBoxProjectPointCloud(); + BBoxProjectPCloud(); + virtual ~BBoxProjectPCloud(); }; #endif // ALL_SEAING_PERCEPTION__BBOX_PROJECT_PCLOUD_HPP \ No newline at end of file From e76851f0dbc50ef331cba4ddddcbf53be246e6d2 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Wed, 6 Nov 2024 01:48:19 -0500 Subject: [PATCH 04/35] Made it work and visualize the points in the bounding boxes in RViZ (throws some out of range errors but I catched them and made them warnings and it seems to work fine) --- all_seaing_bringup/launch/sim.launch.py | 9 ++++ .../msg/LabeledObjectPointCloud.msg | 1 - .../bbox_project_pcloud.cpp | 53 ++++++++++++++++--- .../bbox_project_pcloud.hpp | 4 ++ .../launch/bbox_project_pcloud_test.launch.py | 15 ++++++ .../launch/yolov8_detection_sim.launch.py | 52 ++++++++++++++++++ 6 files changed, 127 insertions(+), 7 deletions(-) create mode 100644 all_seaing_perception/launch/bbox_project_pcloud_test.launch.py create mode 100644 all_seaing_perception/launch/yolov8_detection_sim.launch.py diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 580e77bd..119b05bd 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -146,6 +146,14 @@ def generate_launch_description(): ], ) + bbox_project_pcloud_node = launch_ros.actions.Node( + package="all_seaing_perception", + executable="bbox_project_pcloud", + parameters=[ + {"bbox_object_margin": 0.0} + ] + ) + rviz_node = launch_ros.actions.Node( package="rviz2", executable="rviz2", @@ -232,6 +240,7 @@ def generate_launch_description(): color_segmentation_node, point_cloud_filter_node, obstacle_detector_node, + # bbox_project_pcloud_node, rviz_node, control_mux, controller_server, diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg index 8895d640..53ef7f6a 100644 --- a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -1,3 +1,2 @@ int8 label -float64 probability sensor_msgs/PointCloud2 cloud \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 585fe703..f4404ab2 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -6,12 +6,14 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_tf_listener = std::make_shared(*m_tf_buffer); this->declare_parameter("bbox_object_margin", 0.0); + this->declare_parameter("camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"); + this->declare_parameter("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"); // Subscriptions m_image_intrinsics_sub = this->create_subscription( - "camera_info", 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); - m_image_sub.subscribe(this, "image", rmw_qos_profile_sensor_data); - m_cloud_sub.subscribe(this, "point_cloud", rmw_qos_profile_sensor_data); + this->get_parameter("camera_info_topic").as_string(), 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); + m_image_sub.subscribe(this, 
this->get_parameter("camera_topic").as_string(), rmw_qos_profile_sensor_data); + m_cloud_sub.subscribe(this, "point_cloud/filtered", rmw_qos_profile_sensor_data); m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data); // Send pc msg and img msg to bb_pcl_project @@ -22,9 +24,11 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Publishers m_object_pcl_pub = this->create_publisher("labeled_object_point_clouds", 5); + m_object_pcl_viz_pub = this->create_publisher("object_point_clouds_viz", 5); } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { + RCLCPP_INFO(this->get_logger(), "GOT CAMERA INFO"); m_cam_model.fromCameraInfo(info_msg); } @@ -32,7 +36,7 @@ void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg) { - + RCLCPP_INFO(this->get_logger(), "GOT DATA"); // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) // TODO: get the camera frame another way and don't subscribe to the image topic, we only need the bounding boxes which are another topic @@ -44,14 +48,27 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::PointCloud::Ptr in_cloud_tf_ptr(new pcl::PointCloud); pcl::fromROSMsg(in_cloud_tf, *in_cloud_tf_ptr); - auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); + RCLCPP_INFO(this->get_logger(), "%d POINTS, %d OBJECTS", in_cloud_tf_ptr->points.size(), in_bbox_msg->boxes.size()); + RCLCPP_INFO(this->get_logger(), "FRAME_ID BEFORE SELECTION, AFTER TRANSFORM TO CAMERA FRAME: %s", in_cloud_tf_ptr->header.frame_id); + auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); + std::vector> obj_cloud_vec; + int max_len = 0; // Just use the same pcloud to image projection, but check if it's within some binding box and assign it to that detection + int obj = 0; + for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { + // Project 3D point onto the image plane using the intrinsic matrix. + // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. + RCLCPP_INFO(this->get_logger(), "3D POINT: (%lf, %lf, %lf)", point_tf.x, point_tf.y, point_tf.z); + cv::Point2d xy_rect = m_cam_model.project3dToPixel(cv::Point3d(point_tf.y, point_tf.z, -point_tf.x)); + // Check if within bounds & in front of the boat + RCLCPP_INFO(this->get_logger(), "POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); + } for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); labeled_pcl.label = bbox.label; - labeled_pcl.probability = bbox.probability; + RCLCPP_INFO(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { // Project 3D point onto the image plane using the intrinsic matrix. // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. 
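Aside (illustrative, not part of this commit): RCLCPP_INFO and the other rclcpp logging macros use printf-style format strings, so the std::size_t and std::string arguments logged above need explicit conversion, e.g.:

    RCLCPP_INFO(this->get_logger(), "%zu POINTS, %zu OBJECTS",
                in_cloud_tf_ptr->points.size(), in_bbox_msg->boxes.size());
    RCLCPP_INFO(this->get_logger(), "FRAME_ID AFTER TRANSFORM TO CAMERA FRAME: %s",
                in_cloud_tf_ptr->header.frame_id.c_str());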
@@ -63,13 +80,37 @@ void BBoxProjectPCloud::bb_pcl_project( // Check if point is in bbox if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ obj_cloud_ptr->push_back(point_tf); + RCLCPP_INFO(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); } } } pcl::toROSMsg(*obj_cloud_ptr, labeled_pcl.cloud); object_pcls.objects.push_back(labeled_pcl); + obj_cloud_vec.push_back(*obj_cloud_ptr); + obj++; + RCLCPP_INFO(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); + max_len = std::max(max_len, (int)obj_cloud_ptr->size()); } + RCLCPP_INFO(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); + pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); + all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; + //convert vector of PointCloud to a single PointCloud with channels + all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); + RCLCPP_INFO(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); + RCLCPP_INFO(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); + try{ + for(int i = 0; iat(i,j) = obj_cloud_vec[i][j]; + } + } + }catch(std::exception &ex){ + RCLCPP_WARN(this->get_logger(), "%s", ex.what()); + } + auto obj_pcls_msg = sensor_msgs::msg::PointCloud2(); + pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); + m_object_pcl_viz_pub->publish(obj_pcls_msg); } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index ba4a98f3..068ee2fc 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -2,6 +2,9 @@ #define ALL_SEAING_PERCEPTION__BBOX_PROJECT_PCLOUD_HPP #include +#include +#include +#include #include "rclcpp/rclcpp.hpp" @@ -36,6 +39,7 @@ class BBoxProjectPCloud : public rclcpp::Node{ private: // Publishers and subscribers rclcpp::Publisher::SharedPtr m_object_pcl_pub; + rclcpp::Publisher::SharedPtr m_object_pcl_viz_pub; rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; message_filters::Subscriber m_image_sub; message_filters::Subscriber m_cloud_sub; diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py new file mode 100644 index 00000000..e71d80b3 --- /dev/null +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -0,0 +1,15 @@ +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch.actions import IncludeLaunchDescription +from launch.launch_description_sources import PythonLaunchDescriptionSource +import launch_ros + +def generate_launch_description(): + bbox_project_pcloud_node = launch_ros.actions.Node( + package="all_seaing_perception", + executable="bbox_project_pcloud", + output="screen" + ) + return LaunchDescription([ + bbox_project_pcloud_node + ]) \ No newline at end of file diff --git a/all_seaing_perception/launch/yolov8_detection_sim.launch.py 
b/all_seaing_perception/launch/yolov8_detection_sim.launch.py new file mode 100644 index 00000000..b72a76b7 --- /dev/null +++ b/all_seaing_perception/launch/yolov8_detection_sim.launch.py @@ -0,0 +1,52 @@ +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch.actions import IncludeLaunchDescription +from launch.launch_description_sources import PythonLaunchDescriptionSource + +""" +Reference: https://github.com/mgonzs13/yolov8_ros + +Instructions: +$ cd ~/arcturus/dev_ws/src/ +$ git clone https://github.com/mgonzs13/yolov8_ros.git +$ pip install -r yolov8_ros/requirements.txt +$ cd ~/arcturus/dev_ws/ +$ rosdep install --from-paths src --ignore-src -r -y +$ colcon build --symlink-install +$ source ~/arcturus/dev_ws/install/setup.bash +$ source ~/arcturus/vrx_ws/install/setup.bash + +follow those instructions before running script + +Parameters to Yolov8 Launch + model: YOLOv8 model (default: yolov8m.pt) + tracker: tracker file (default: bytetrack.yaml) + device: GPU/CUDA (default: cuda:0) + enable: wether to start YOLOv8 enabled (default: True) + threshold: detection threshold (default: 0.5) + input_image_topic: camera topic of RGB images (default: /camera/rgb/image_raw) + image_reliability: reliability for the image topic: 0=system default, 1=Reliable, 2=Best Effort (default: 2) + +Ensure that the model parameter has the correct path to a yolov8 model +and that input_image_topic subscribes to the correct topic (such as zed) +""" + + +def generate_launch_description(): + + yolov8_prefix = get_package_share_directory("yolov8_bringup") + model_share_path = get_package_share_directory("all_seaing_perception") + + return LaunchDescription( + [ + IncludeLaunchDescription( + PythonLaunchDescriptionSource( + [yolov8_prefix, "/launch/yolov8.launch.py"] + ), + launch_arguments={ + "model": model_share_path + "/models/yolov8_roboboat_model.pt", + "input_image_topic": "/wamv/sensors/cameras/front_left_camera_sensor/image_raw", + }.items(), + ) + ] + ) From b2d402a82d41b3855bc9d36889b26db5ead0c2e8 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Wed, 6 Nov 2024 01:58:01 -0500 Subject: [PATCH 05/35] Minor change (forgot something), the custom message should work too now --- .../all_seaing_perception/bbox_project_pcloud.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index f4404ab2..29da65aa 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -68,6 +68,7 @@ void BBoxProjectPCloud::bb_pcl_project( auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); labeled_pcl.label = bbox.label; + obj_cloud_ptr->header = in_cloud_tf_ptr->header; RCLCPP_INFO(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { // Project 3D point onto the image plane using the intrinsic matrix. 
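Aside (illustrative, not part of this commit series): the commit below revisits the index order used for the combined visualization cloud, so PCL's organized-cloud convention is worth spelling out: width is the number of columns, height the number of rows, and at() takes (column, row), indexing points[row * width + column]. A minimal self-contained sketch with hypothetical sizes:

    #include <cstdint>
    #include <pcl/point_cloud.h>
    #include <pcl/point_types.h>

    // One row per detected object, up to max_len points (columns) per row.
    pcl::PointCloud<pcl::PointXYZI> makeObjectGrid(std::uint32_t max_len, std::uint32_t num_objects) {
        pcl::PointCloud<pcl::PointXYZI> grid(max_len /*width*/, num_objects /*height*/);
        grid.at(/*column=*/0, /*row=*/0).x = 1.0f;  // i.e. points[row * width + column]
        return grid;
    }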
From 25742ec3d1aaac63440981091dbc1d7f1427ea5b Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Thu, 7 Nov 2024 20:56:22 -0500 Subject: [PATCH 06/35] Made it actually work, fixed the out of bounds errors (swapped dimensions in resize, may be the wrong order in terms of channels so may need to change the order both in resize() and in at() again), seems to correctly select the points in the objects detected --- all_seaing_bringup/launch/sim.launch.py | 18 ++++ all_seaing_bringup/rviz/dashboard.rviz | 83 +++++++++++++++++-- .../bbox_project_pcloud.cpp | 14 ++-- 3 files changed, 103 insertions(+), 12 deletions(-) diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 119b05bd..0e8948f5 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -229,6 +229,23 @@ def generate_launch_description(): }.items(), ) + yolov8_prefix = get_package_share_directory("yolov8_bringup") + model_share_path = get_package_share_directory("all_seaing_perception") + + yolov8_ld = LaunchDescription( + [ + IncludeLaunchDescription( + PythonLaunchDescriptionSource( + [yolov8_prefix, "/launch/yolov8.launch.py"] + ), + launch_arguments={ + "model": model_share_path + "/models/yolov8_roboboat_model.pt", + "input_image_topic": "/wamv/sensors/cameras/front_left_camera_sensor/image_raw", + }.items(), + ) + ] + ) + return LaunchDescription( [ launch_rviz_launch_arg, @@ -249,6 +266,7 @@ def generate_launch_description(): rviz_waypoint_sender, keyboard_ld, sim_ld, + # yolov8_ld, perception_eval_node, ] ) diff --git a/all_seaing_bringup/rviz/dashboard.rviz b/all_seaing_bringup/rviz/dashboard.rviz index eef71e4d..39347d87 100644 --- a/all_seaing_bringup/rviz/dashboard.rviz +++ b/all_seaing_bringup/rviz/dashboard.rviz @@ -5,6 +5,9 @@ Panels: Property Tree Widget: Expanded: - /Global Options1 + - /PointCloud21 + - /PointCloud22 + - /PointCloud22/Topic1 Splitter Ratio: 0.5 Tree Height: 603 - Class: rviz_common/Selection @@ -24,7 +27,7 @@ Panels: Experimental: false Name: Time SyncMode: 0 - SyncSource: "" + SyncSource: PointCloud2 Visualization Manager: Class: "" Displays: @@ -276,7 +279,7 @@ Visualization Manager: Enabled: true Name: MarkerArray Namespaces: - "": true + {} Topic: Depth: 5 Durability Policy: Volatile @@ -284,6 +287,74 @@ Visualization Manager: Reliability Policy: Reliable Value: /waypoint_markers Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz_default_plugins/PointCloud2 + Color: 255; 255; 255 + Color Transformer: Intensity + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 0 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: PointCloud2 + Position Transformer: XYZ + Selectable: true + Size (Pixels): 3 + Size (m): 0.10000000149011612 + Style: Flat Squares + Topic: + Depth: 5 + Durability Policy: Volatile + Filter size: 10 + History Policy: Keep Last + Reliability Policy: Best Effort + Value: /object_point_clouds_viz + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz_default_plugins/PointCloud2 + Color: 255; 255; 255 + Color Transformer: Intensity + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 0 + Min Color: 0; 
0; 0 + Min Intensity: 0 + Name: PointCloud2 + Position Transformer: XYZ + Selectable: true + Size (Pixels): 3 + Size (m): 0.10000000149011612 + Style: Flat Squares + Topic: + Depth: 5 + Durability Policy: Volatile + Filter size: 10 + History Policy: Keep Last + Reliability Policy: Best Effort + Value: /point_cloud/filtered + Use Fixed Frame: true + Use rainbow: true + Value: true Enabled: true Global Options: Background Color: 48; 48; 48 @@ -330,16 +401,16 @@ Visualization Manager: Views: Current: Class: rviz_default_plugins/Orbit - Distance: 39.35666275024414 + Distance: 77.6830825805664 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: 1.5132484436035156 - Y: 20.534038543701172 - Z: 3.7858240604400635 + X: 3.2419750690460205 + Y: 17.11014175415039 + Z: -1.9841415882110596 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 29da65aa..2d450d87 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -59,17 +59,17 @@ void BBoxProjectPCloud::bb_pcl_project( for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { // Project 3D point onto the image plane using the intrinsic matrix. // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. - RCLCPP_INFO(this->get_logger(), "3D POINT: (%lf, %lf, %lf)", point_tf.x, point_tf.y, point_tf.z); + RCLCPP_DEBUG(this->get_logger(), "3D POINT: (%lf, %lf, %lf)", point_tf.x, point_tf.y, point_tf.z); cv::Point2d xy_rect = m_cam_model.project3dToPixel(cv::Point3d(point_tf.y, point_tf.z, -point_tf.x)); // Check if within bounds & in front of the boat - RCLCPP_INFO(this->get_logger(), "POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); + RCLCPP_DEBUG(this->get_logger(), "POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); } for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); labeled_pcl.label = bbox.label; obj_cloud_ptr->header = in_cloud_tf_ptr->header; - RCLCPP_INFO(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); + RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { // Project 3D point onto the image plane using the intrinsic matrix. // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. 
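Aside (illustrative, not part of this commit): the per-point messages demoted to RCLCPP_DEBUG in this commit stay hidden at the default INFO level; one way to surface them, assuming the node's default logger name, is to raise the logger level programmatically:

    #include <rcutils/logging.h>

    // e.g. in the constructor, before the callbacks start firing
    rcutils_logging_set_logger_level(this->get_logger().get_name(),
                                     RCUTILS_LOG_SEVERITY_DEBUG);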
@@ -81,7 +81,7 @@ void BBoxProjectPCloud::bb_pcl_project( // Check if point is in bbox if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ obj_cloud_ptr->push_back(point_tf); - RCLCPP_INFO(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); + RCLCPP_DEBUG(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); } } } @@ -101,13 +101,15 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_INFO(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); RCLCPP_INFO(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ + //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS + //(I.E. HEIGHT & WIDTH ARE CORRECT, OTHERWISE SWAP THEIR ORDER BOTH IN THE RESIZE() AND THE AT() METHODS) for(int i = 0; iat(i,j) = obj_cloud_vec[i][j]; + all_obj_pcls_ptr->at(j,i) = obj_cloud_vec[i][j]; } } }catch(std::exception &ex){ - RCLCPP_WARN(this->get_logger(), "%s", ex.what()); + RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); } auto obj_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); From 65c73fecca593342b015136ccedd3d969e58022e Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 10 Nov 2024 20:15:34 -0500 Subject: [PATCH 07/35] progress in refining all point clouds (for each object) to have actual meaningful clusters based on the detections --- all_seaing_bringup/launch/sim.launch.py | 15 ++- .../bbox_project_pcloud.cpp | 103 +++++++++++++++++- .../bbox_project_pcloud.hpp | 22 ++++ 3 files changed, 136 insertions(+), 4 deletions(-) diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index bc676acb..0a8f98a0 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -153,7 +153,18 @@ def generate_launch_description(): package="all_seaing_perception", executable="bbox_project_pcloud", parameters=[ - {"bbox_object_margin": 0.0} + {"bbox_object_margin": 0.0}, + {"camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"}, + {"camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"}, + {"lidar_topic", "point_cloud/filtered"}, + {"color_label_mappings_file": color_label_mappings}, + {"color_ranges_file": color_ranges}, + {"obstacle_size_min": 2}, + {"obstacle_size_max": 60}, + {"clustering_distance": 1.0}, + {"obstacle_seg_thresh": 10.0}, + {"obstacle_drop_thresh": 1.0}, + {"polygon_area_thresh": 100000.0} ] ) @@ -259,7 +270,7 @@ def generate_launch_description(): color_segmentation_node, point_cloud_filter_node, obstacle_detector_node, - # bbox_project_pcloud_node, + bbox_project_pcloud_node, rviz_node, control_mux, controller_server, diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 2d450d87..a25d4656 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -5,15 +5,39 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_tf_buffer = std::make_unique(this->get_clock()); m_tf_listener = std::make_shared(*m_tf_buffer); + //essential ones 
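// [Illustrative aside, not part of this commit] the declare/get pattern used for the parameters
// below; note that ROS 2 parameter names and string defaults are std::string literals, so they
// need double quotes (single quotes form char literals in C++):
//   this->declare_parameter("color_label_mappings_file", "");
//   std::string mappings_path =
//       this->get_parameter("color_label_mappings_file").as_string();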
this->declare_parameter("bbox_object_margin", 0.0); + this->declare_parameter("lidar_topic", "/wamv/sensors/lidars/lidar_wamv_sensor/points"); this->declare_parameter("camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"); this->declare_parameter("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"); + // for cluster extraction + this->declare_parameter("obstacle_size_min", 20); + this->declare_parameter("obstacle_size_max", 100000); + this->declare_parameter("clustering_distance", 0.75); + this->declare_parameter("obstacle_seg_thresh", 1.0); + this->declare_parameter("obstacle_drop_thresh", 1.0); + this->declare_parameter("polygon_area_thresh", 100000.0); + + m_obstacle_size_min = this->get_parameter("obstacle_size_min").as_int(); + m_obstacle_size_max = this->get_parameter("obstacle_size_max").as_int(); + m_clustering_distance = this->get_parameter("clustering_distance").as_double(); + m_obstacle_seg_thresh = this->get_parameter("obstacle_seg_thresh").as_double(); + m_obstacle_drop_thresh = this->get_parameter("obstacle_drop_thresh").as_double(); + m_polygon_area_thresh = this->get_parameter("polygon_area_thresh").as_double(); + + // for color segmentation + this->declare_parameter('color_ranges_file', ''); + this->declare_parameter('color_label_mappings_file', '') + + color_ranges_file = this->get_parameter('color_ranges_file').as_string() + color_label_mappings_file = this->get_parameter('color_label_mappings_file').as_string() + // Subscriptions m_image_intrinsics_sub = this->create_subscription( this->get_parameter("camera_info_topic").as_string(), 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); m_image_sub.subscribe(this, this->get_parameter("camera_topic").as_string(), rmw_qos_profile_sensor_data); - m_cloud_sub.subscribe(this, "point_cloud/filtered", rmw_qos_profile_sensor_data); + m_cloud_sub.subscribe(this, this->get_parameter("lidar_topic").as_string(), rmw_qos_profile_sensor_data); m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data); // Send pc msg and img msg to bb_pcl_project @@ -25,6 +49,34 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Publishers m_object_pcl_pub = this->create_publisher("labeled_object_point_clouds", 5); m_object_pcl_viz_pub = this->create_publisher("object_point_clouds_viz", 5); + + // get color label mappings from yaml + std::ifstream label_yaml(color_label_mappings_file); + if (label_yaml.is_open()) { + label_config_yaml = YAML::Load(label_yaml); + for (YAML::const_iterator it = label_config_yaml.begin(); it != label_config_yaml.end(); ++it) { + // RCLCPP_INFO(this->get_logger(), "label %d and color %s", it->second.as(), it->second.as().c_str()); + label_color_map[it->second.as()] = it->first.as(); + } + } + else { + RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", color_label_mappings_file.c_str()); + } + label_yaml.close(); + + // get color ranges from yaml + std::ifstream ranges_yaml(color_ranges_file); + if (ranges_yaml.is_open()) { + ranges_config_yaml = YAML::Load(ranges_yaml); + for (YAML::const_iterator it = ranges_config_yaml.begin(); it != ranges_config_yaml.end(); ++it) { + // RCLCPP_INFO(this->get_logger(), "label %d and color %s", it->second.as(), it->second.as().c_str()); + color_range_map[it->first.as()] = it->second.as(); + } + } + else { + RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", color_ranges_file.c_str()); + } + ranges_yaml.close(); } void 
BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -39,7 +91,6 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_INFO(this->get_logger(), "GOT DATA"); // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) - // TODO: get the camera frame another way and don't subscribe to the image topic, we only need the bounding boxes which are another topic m_pc_cam_tf = get_tf(in_img_msg->header.frame_id, in_cloud_msg->header.frame_id); // Transform in_cloud_msg to the camera frame and convert PointCloud2 to PCL PointCloud @@ -64,6 +115,7 @@ void BBoxProjectPCloud::bb_pcl_project( // Check if within bounds & in front of the boat RCLCPP_DEBUG(this->get_logger(), "POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); } + std::vector>::Ptr> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); @@ -88,6 +140,7 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::toROSMsg(*obj_cloud_ptr, labeled_pcl.cloud); object_pcls.objects.push_back(labeled_pcl); obj_cloud_vec.push_back(*obj_cloud_ptr); + bbox_pcloud_objects.push_back(std::make_pair(bbox, obj_cloud_ptr)); obj++; RCLCPP_INFO(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); max_len = std::max(max_len, (int)obj_cloud_ptr->size()); @@ -114,6 +167,52 @@ void BBoxProjectPCloud::bb_pcl_project( auto obj_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); m_object_pcl_viz_pub->publish(obj_pcls_msg); + + // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. 
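// [Illustrative aside, not part of this commit] the conventional OpenCV route from the BGR8
// cv_bridge image obtained just below to HSV, the space the color ranges are expressed in:
//   cv::Mat hsv_image;
//   cv::cvtColor(cv_ptr->image, hsv_image, cv::COLOR_BGR2HSV);  // source is BGR8, not RGB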
+ cv_bridge::CvImagePtr cv_ptr; + try { + cv_ptr = cv_bridge::toCvCopy(in_img_msg, sensor_msgs::image_encodings::BGR8); + } catch (cv_bridge::Exception &e) { + RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); + return; + } + + cv_bridge::CvImagePtr cv_hsv_ptr = cv_bridge::cvtColor(cv_ptr.toCvShare(), cv::COLOR_RGB2HSV); + + // REFINE OBJECT POINT CLOUDS + for(auto bbox_pcloud_pair : bbox_pcloud_objects){ + all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; + pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; + + //extract clusters + pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); + if (!pcloud_ptr->points.empty()) + tree->setInputCloud(pcloud_ptr); + std::vector obstacles_indices; + pcl::EuclideanClusterExtraction ec; + ec.setClusterTolerance(m_clustering_distance); + ec.setMinClusterSize(m_obstacle_size_min); + ec.setMaxClusterSize(m_obstacle_size_max); + ec.setSearchMethod(tree); + ec.setInputCloud(pcloud_ptr); + ec.extract(obstacles_indices); + + //color segmentation using the color label + cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); + cv::Size img_sz; + cv::Point bbox_offset; + hsv_img.locateROI(img_sz, bbox_offset); + int lims[6] = label_color_map[bbox.label]=="red"?color_range_map["red2"]:color_range_map[label_color_map[bbox.label]]; + int h_min=lims[0], h_max=lims[1], s_min=lims[2], s_max=lims[3], v_min=lims[4], v_max=lims[5]; + cv::Mat mask; + cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); + cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 5)); + cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); + //go through the obstacle clusters and take the most matching one + for (auto it = obstacles_indices.begin(); it != obstacles_indices.end(); it++) { + + } + } } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 068ee2fc..a27d4751 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -5,6 +5,7 @@ #include #include #include +#include #include "rclcpp/rclcpp.hpp" @@ -35,6 +36,10 @@ #include "all_seaing_interfaces/msg/labeled_object_point_cloud_array.hpp" #include "all_seaing_interfaces/msg/labeled_object_point_cloud.hpp" +#include +#include +#include "yaml-cpp/yaml.h" + class BBoxProjectPCloud : public rclcpp::Node{ private: // Publishers and subscribers @@ -77,6 +82,23 @@ class BBoxProjectPCloud : public rclcpp::Node{ const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg); + + int m_obstacle_id; + + // for cluster extraction + int m_obstacle_size_min; + int m_obstacle_size_max; + double m_clustering_distance; + double m_obstacle_seg_thresh; + double m_obstacle_drop_thresh; + double m_polygon_area_thresh; + + // for color segmentation + std::string color_ranges_file; + std::string color_label_mappings_file; + YAML::Node label_config_yaml, ranges_config_yaml; + std::map label_color_map; + std::map color_range_map; public: BBoxProjectPCloud(); virtual ~BBoxProjectPCloud(); From 
aed17e1403413a1d8196b66627be30a49d45c6b6 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 10 Nov 2024 20:19:51 -0500 Subject: [PATCH 08/35] add to-do --- .../all_seaing_perception/bbox_project_pcloud.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index a25d4656..3f760782 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -208,6 +208,8 @@ void BBoxProjectPCloud::bb_pcl_project( cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 5)); cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); + //TODO: now do the rest for color segmentation, starting from the contours (see color_segmentation.py) + //go through the obstacle clusters and take the most matching one for (auto it = obstacles_indices.begin(); it != obstacles_indices.end(); it++) { From c5cfa9c7789c0178b921c628520b1336d620783b Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 12 Nov 2024 23:10:58 -0500 Subject: [PATCH 09/35] Not much, just added the command that finds the contours --- .../all_seaing_perception/bbox_project_pcloud.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 3f760782..250fd0bb 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -198,6 +198,7 @@ void BBoxProjectPCloud::bb_pcl_project( ec.extract(obstacles_indices); //color segmentation using the color label + //TODO: in the end, after finishing the processing, put the results back into the original image (using bbox_offset) to be able to use it with the LiDAR cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); cv::Size img_sz; cv::Point bbox_offset; @@ -209,6 +210,11 @@ void BBoxProjectPCloud::bb_pcl_project( cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 5)); cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); //TODO: now do the rest for color segmentation, starting from the contours (see color_segmentation.py) + std::vector> contours; + cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); + for(auto contour : contours){ + + } //go through the obstacle clusters and take the most matching one for (auto it = obstacles_indices.begin(); it != obstacles_indices.end(); it++) { From 2ce7b8f44055d57e197ea589a569ae7adc0cd90e Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 17 Nov 2024 23:39:01 -0500 Subject: [PATCH 10/35] Setup the parameters for the cluster and contour matching and selection, need to find a euclidean clustering algorithm for n-dimensional data or with customizable distance function, or just one that takes color into account (there are some in PCL but they mostly rely on color thresholds or difference limits rather than adding their squared difference to the distance) or implement it from scratch (and KD-tree from scratch?) 
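One possibly relevant option for the clustering question above (an assumption, not something this series adopts): PCL ships pcl::ConditionalEuclideanClustering, which keeps the usual KD-tree radius search but lets a user-supplied predicate decide whether a neighboring point (already within the spatial cluster tolerance) joins the growing cluster, so a weighted HSV term can be folded in without reimplementing the clustering or the KD-tree. A minimal sketch, assuming pcl::PointXYZHSV points like those built later in this commit and hypothetical weights/threshold standing in for the matching_weights.yaml entries:

    #include <vector>
    #include <pcl/point_cloud.h>
    #include <pcl/point_types.h>
    #include <pcl/segmentation/conditional_euclidean_clustering.h>

    // Hypothetical weights; in this series they would come from matching_weights.yaml.
    static constexpr float kHueWeight = 1.0f, kSatWeight = 1.0f, kValWeight = 1.0f;
    static constexpr float kColorThresh = 0.5f;  // assumed cutoff on the weighted HSV distance

    // Called only for pairs already within the spatial cluster tolerance; additionally require
    // the weighted HSV difference to stay small. The h/s/v scales depend on how the fields were
    // filled (OpenCV HSV vs. PCL's 0-360 / 0-1 convention), so weights and cutoff must match.
    bool colorAwareCondition(const pcl::PointXYZHSV &a, const pcl::PointXYZHSV &b,
                             float /*squared_spatial_distance*/) {
        const float dh = a.h - b.h, ds = a.s - b.s, dv = a.v - b.v;
        const float color_dist2 =
            kHueWeight * dh * dh + kSatWeight * ds * ds + kValWeight * dv * dv;
        return color_dist2 < kColorThresh * kColorThresh;
    }

    std::vector<pcl::PointIndices> clusterWithColor(
            const pcl::PointCloud<pcl::PointXYZHSV>::Ptr &cloud,
            float tolerance, int min_size, int max_size) {
        pcl::ConditionalEuclideanClustering<pcl::PointXYZHSV> cec;
        cec.setInputCloud(cloud);
        cec.setConditionFunction(&colorAwareCondition);
        cec.setClusterTolerance(tolerance);  // spatial radius, as in EuclideanClusterExtraction
        cec.setMinClusterSize(min_size);
        cec.setMaxClusterSize(max_size);
        std::vector<pcl::PointIndices> clusters;
        cec.segment(clusters);
        return clusters;
    }

Whether a hard color cutoff (as in this sketch) or a single combined spatial-plus-color distance is preferable is exactly the trade-off the commit message leaves open.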
--- .../config/perception/matching_weights.yaml | 6 ++ all_seaing_bringup/launch/sim.launch.py | 7 +- .../bbox_project_pcloud.cpp | 80 ++++++++++++------- .../bbox_project_pcloud.hpp | 13 +++ .../launch/bbox_project_pcloud_test.launch.py | 28 ++++++- 5 files changed, 102 insertions(+), 32 deletions(-) create mode 100644 all_seaing_bringup/config/perception/matching_weights.yaml diff --git a/all_seaing_bringup/config/perception/matching_weights.yaml b/all_seaing_bringup/config/perception/matching_weights.yaml new file mode 100644 index 00000000..0d632e80 --- /dev/null +++ b/all_seaing_bringup/config/perception/matching_weights.yaml @@ -0,0 +1,6 @@ +clustering_distance_weight: 1.0 +clustering_color_weights: [1.0, 1.0, 1.0] +cluster_contour_distance_weight: 1.0 +cluster_contour_color_weights: [1.0, 1.0, 1.0] +contour_detection_color_weights: [1.0, 1.0, 1.0] +cluster_contour_size_weight: 1.0 \ No newline at end of file diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 0a8f98a0..0497c403 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -25,7 +25,9 @@ def generate_launch_description(): color_ranges = os.path.join( bringup_prefix, "config", "perception", "color_ranges.yaml" ) - + matching_weights = os.path.join( + bringup_prefix, "config", "perception", "filtering_weights.yaml" + ) subprocess.run(["cp", "-r", os.path.join(bringup_prefix, "tile"), "/tmp"]) launch_rviz = LaunchConfiguration("launch_rviz") @@ -164,7 +166,8 @@ def generate_launch_description(): {"clustering_distance": 1.0}, {"obstacle_seg_thresh": 10.0}, {"obstacle_drop_thresh": 1.0}, - {"polygon_area_thresh": 100000.0} + {"polygon_area_thresh": 100000.0}, + {"matching_weights_file": matching_weights} ] ) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 250fd0bb..5beca3ee 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -28,10 +28,14 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // for color segmentation this->declare_parameter('color_ranges_file', ''); - this->declare_parameter('color_label_mappings_file', '') + this->declare_parameter('color_label_mappings_file', ''); - color_ranges_file = this->get_parameter('color_ranges_file').as_string() - color_label_mappings_file = this->get_parameter('color_label_mappings_file').as_string() + color_ranges_file = this->get_parameter('color_ranges_file').as_string(); + color_label_mappings_file = this->get_parameter('color_label_mappings_file').as_string(); + + // for cluster-contour matching & selection + this->declare_parameter('matching_weights_file', ''); + matching_weights_file = this->get_parameter('matching_weights_file').as_string(); // Subscriptions m_image_intrinsics_sub = this->create_subscription( @@ -76,7 +80,24 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", color_ranges_file.c_str()); } + ranges_yaml.close(); + + // get cluster-contour matching & selection parameters from yaml + std::ifstream matching_yaml(matching_weights_file); + if (matching_yaml.is_open()) { + matching_weights_yaml = YAML::Load(matching_yaml); + + m_clustering_distance_weight = matching_weights_yaml["clustering_distance_weight"]; + m_clustering_color_weights = 
matching_weights_yaml["clustering_color_weights"]; + m_cluster_contour_distance_weight = matching_weights_yaml["cluster_contour_distance_weight"]; + m_cluster_contour_color_weights = matching_weights_yaml["cluster_contour_color_weights"]; + m_contour_detection_color_weights = matching_weights_yaml["contour_detection_color_weights"]; + m_cluster_contour_size_weight = matching_weights_yaml["cluster_contour_size_weight"]; + } + else { + RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", matching_weights_file.c_str()); + } } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -115,10 +136,19 @@ void BBoxProjectPCloud::bb_pcl_project( // Check if within bounds & in front of the boat RCLCPP_DEBUG(this->get_logger(), "POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); } - std::vector>::Ptr> bbox_pcloud_objects; + // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. + cv_bridge::CvImagePtr cv_ptr; + try { + cv_ptr = cv_bridge::toCvCopy(in_img_msg, sensor_msgs::image_encodings::BGR8); + } catch (cv_bridge::Exception &e) { + RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); + return; + } + cv_bridge::CvImagePtr cv_hsv_ptr = cv_bridge::cvtColor(cv_ptr.toCvShare(), cv::COLOR_RGB2HSV); + std::vector>::Ptr> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); - pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); + pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); labeled_pcl.label = bbox.label; obj_cloud_ptr->header = in_cloud_tf_ptr->header; RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); @@ -132,8 +162,9 @@ void BBoxProjectPCloud::bb_pcl_project( double bbox_margin = this->get_parameter("bbox_object_margin").as_double(); // Check if point is in bbox if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ - obj_cloud_ptr->push_back(point_tf); - RCLCPP_DEBUG(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf)", xy_rect.x, xy_rect.y); + cv::Vec3b hsv = cv_hsv_ptr->image.at(xy_rect); + obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, hsv[0], hsv[1], hsv[2])); + RCLCPP_DEBUG(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); } } } @@ -147,7 +178,7 @@ void BBoxProjectPCloud::bb_pcl_project( } RCLCPP_INFO(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); - pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); + pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); @@ -168,28 +199,21 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); m_object_pcl_viz_pub->publish(obj_pcls_msg); - // Convert msg to CvImage to work with CV2. Copy img since we will be modifying. 
- cv_bridge::CvImagePtr cv_ptr; - try { - cv_ptr = cv_bridge::toCvCopy(in_img_msg, sensor_msgs::image_encodings::BGR8); - } catch (cv_bridge::Exception &e) { - RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); - return; - } - - cv_bridge::CvImagePtr cv_hsv_ptr = cv_bridge::cvtColor(cv_ptr.toCvShare(), cv::COLOR_RGB2HSV); - // REFINE OBJECT POINT CLOUDS for(auto bbox_pcloud_pair : bbox_pcloud_objects){ all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; - pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; + pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; //extract clusters - pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); + //TODO: MAKE THE CLUSTER EXTRACTION ALGORITHM USE MY OWN DISTANCE METRIC THAT INCORPORATES HSV VALUES WITH DIFFERENT WEIGHTS + //WE CAN WORK OUR WAY AROUND THAT BY USING A DIFFERENT PCL POINT TYPE (ONE WITH MORE CHANNELS, MAYBE INHERIT FROM THE POINTT TYPE AND MAKE MY OWN) OR NORMAL TYPE + //AND EITHER MAKE THE VALUES IN THE CHANNELS MULTIPLIED BY THE SQRT OF THE WEIGHTS (SO THAT IT'S THE SAME FUNCTION) OR CHANGE THE DISTANCE FUNCTION (IF I MAKE MY OWN TYPE) + //OR FIND A GENERIC CLUSTERING ALGORITHM FOR N-DIMENSIONAL DATA AND DO THE ABOVE + pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); if (!pcloud_ptr->points.empty()) tree->setInputCloud(pcloud_ptr); std::vector obstacles_indices; - pcl::EuclideanClusterExtraction ec; + pcl::EuclideanClusterExtraction ec; ec.setClusterTolerance(m_clustering_distance); ec.setMinClusterSize(m_obstacle_size_min); ec.setMaxClusterSize(m_obstacle_size_max); @@ -198,7 +222,6 @@ void BBoxProjectPCloud::bb_pcl_project( ec.extract(obstacles_indices); //color segmentation using the color label - //TODO: in the end, after finishing the processing, put the results back into the original image (using bbox_offset) to be able to use it with the LiDAR cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); cv::Size img_sz; cv::Point bbox_offset; @@ -209,16 +232,15 @@ void BBoxProjectPCloud::bb_pcl_project( cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 5)); cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); - //TODO: now do the rest for color segmentation, starting from the contours (see color_segmentation.py) std::vector> contours; cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); - for(auto contour : contours){ + //TODO: Now convert the contour points to fit the original image (using bbox_offset) to be able to use it with the LiDAR (projected) points - } + //TODO: GO THROUGH ALL THE CLUSTER/CONTOUR PAIRS AND FIND THE BEST ONE BASED ON THE OPTIMALITY METRIC WITH THE WEIGHTS + for (std::vector contour : contours){ + for (pcl::PointIndices obstacle : obstacles_indices) { - //go through the obstacle clusters and take the most matching one - for (auto it = obstacles_indices.begin(); it != obstacles_indices.end(); it++) { - + } } } } diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index a27d4751..cabf7d1b 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -99,6 +99,19 @@ class BBoxProjectPCloud : public rclcpp::Node{ YAML::Node 
label_config_yaml, ranges_config_yaml; std::map label_color_map; std::map color_range_map; + + // for cluster-contour matching/selection + std::string matching_weights_file; + + YAML::Node matching_weights_yaml; + + double m_clustering_distance_weight; + std::vector m_clustering_color_weights; + double m_cluster_contour_distance_weight; + std::vector m_cluster_contour_color_weights; + std::vector m_contour_detection_color_weights; + double m_cluster_contour_size_weight; + public: BBoxProjectPCloud(); virtual ~BBoxProjectPCloud(); diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py index e71d80b3..f3851409 100644 --- a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -3,12 +3,38 @@ from launch.actions import IncludeLaunchDescription from launch.launch_description_sources import PythonLaunchDescriptionSource import launch_ros +import os def generate_launch_description(): + bringup_prefix = get_package_share_directory("all_seaing_bringup") + color_label_mappings = os.path.join( + bringup_prefix, "config", "perception", "color_label_mappings.yaml" + ) + color_ranges = os.path.join( + bringup_prefix, "config", "perception", "color_ranges.yaml" + ) + matching_weights = os.path.join( + bringup_prefix, "config", "perception", "filtering_weights.yaml" + ) bbox_project_pcloud_node = launch_ros.actions.Node( package="all_seaing_perception", executable="bbox_project_pcloud", - output="screen" + output="screen", + parameters=[ + {"bbox_object_margin": 0.0}, + {"camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"}, + {"camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"}, + {"lidar_topic", "point_cloud/filtered"}, + {"color_label_mappings_file": color_label_mappings}, + {"color_ranges_file": color_ranges}, + {"obstacle_size_min": 2}, + {"obstacle_size_max": 60}, + {"clustering_distance": 1.0}, + {"obstacle_seg_thresh": 10.0}, + {"obstacle_drop_thresh": 1.0}, + {"polygon_area_thresh": 100000.0}, + {"matching_weights_file": matching_weights} + ] ) return LaunchDescription([ bbox_project_pcloud_node From 220a4ce8655f1e90e3fcb015a9366eeba3188f30 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Thu, 21 Nov 2024 22:18:55 -0500 Subject: [PATCH 11/35] Added the conditional euclidean clustering code (using an HSV difference based condition) --- .../config/perception/matching_weights.yaml | 7 +-- .../bbox_project_pcloud.cpp | 51 ++++++++++++++----- .../bbox_project_pcloud.hpp | 4 ++ 3 files changed, 47 insertions(+), 15 deletions(-) diff --git a/all_seaing_bringup/config/perception/matching_weights.yaml b/all_seaing_bringup/config/perception/matching_weights.yaml index 0d632e80..dd5b6228 100644 --- a/all_seaing_bringup/config/perception/matching_weights.yaml +++ b/all_seaing_bringup/config/perception/matching_weights.yaml @@ -1,6 +1,7 @@ clustering_distance_weight: 1.0 -clustering_color_weights: [1.0, 1.0, 1.0] +clustering_color_weights: [1.0, 0.0, 0.0] +clustering_color_thres: 30 cluster_contour_distance_weight: 1.0 -cluster_contour_color_weights: [1.0, 1.0, 1.0] -contour_detection_color_weights: [1.0, 1.0, 1.0] +cluster_contour_color_weights: [1.0, 0.0, 0.0] +contour_detection_color_weights: [1.0, 0.0, 0.0] cluster_contour_size_weight: 1.0 \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp 
b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 5beca3ee..b66ddd53 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -90,6 +90,7 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_clustering_distance_weight = matching_weights_yaml["clustering_distance_weight"]; m_clustering_color_weights = matching_weights_yaml["clustering_color_weights"]; + m_clustering_color_thres = matching_weights_yaml["clustering_color_thres"]; m_cluster_contour_distance_weight = matching_weights_yaml["cluster_contour_distance_weight"]; m_cluster_contour_color_weights = matching_weights_yaml["cluster_contour_color_weights"]; m_contour_detection_color_weights = matching_weights_yaml["contour_detection_color_weights"]; @@ -105,6 +106,10 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m m_cam_model.fromCameraInfo(info_msg); } +bool hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist){ + return m_clustering_color_weights[0]*p1.h^2+m_clustering_color_weights[1]*p1.s^2+m_clustering_color_weights[2]*p1.v^2= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ cv::Vec3b hsv = cv_hsv_ptr->image.at(xy_rect); + // cv::Vec3b rgb = cv_ptr->image.at(xy_rect); obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, hsv[0], hsv[1], hsv[2])); - RCLCPP_DEBUG(this->get_logger(), "SELECTED POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); + // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, rgb[0], rgb[1], rgb[2])); + RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); + // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, rgb[0], rgb[1], rgb[2]); } } } @@ -179,6 +187,7 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_INFO(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); + // pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); @@ -203,23 +212,41 @@ void BBoxProjectPCloud::bb_pcl_project( for(auto bbox_pcloud_pair : bbox_pcloud_objects){ all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; + // pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; + // pcl::PointCloud::Ptr pcloud_ptr_hsv(new pcl::PointCloud); + // // CONVERT RGB POINTS TO HSV + // for(auto pt : pcloud_ptr->points){ + // pcl::PointXYZHSV pt_hsv; + // pcl::PointXYZRGBtoXYZHSV(*pt, pt_hsv); + // pcloud_ptr_hsv->push_back(pt_hsv); + // } //extract clusters - //TODO: MAKE THE CLUSTER EXTRACTION ALGORITHM USE MY OWN DISTANCE METRIC THAT INCORPORATES HSV VALUES WITH DIFFERENT WEIGHTS - //WE CAN WORK OUR WAY AROUND THAT BY USING A DIFFERENT PCL POINT TYPE (ONE WITH MORE CHANNELS, MAYBE INHERIT FROM THE POINTT TYPE AND MAKE MY OWN) OR NORMAL TYPE - //AND EITHER MAKE THE 
VALUES IN THE CHANNELS MULTIPLIED BY THE SQRT OF THE WEIGHTS (SO THAT IT'S THE SAME FUNCTION) OR CHANGE THE DISTANCE FUNCTION (IF I MAKE MY OWN TYPE) - //OR FIND A GENERIC CLUSTERING ALGORITHM FOR N-DIMENSIONAL DATA AND DO THE ABOVE pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); + // pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); if (!pcloud_ptr->points.empty()) tree->setInputCloud(pcloud_ptr); std::vector obstacles_indices; - pcl::EuclideanClusterExtraction ec; - ec.setClusterTolerance(m_clustering_distance); - ec.setMinClusterSize(m_obstacle_size_min); - ec.setMaxClusterSize(m_obstacle_size_max); - ec.setSearchMethod(tree); - ec.setInputCloud(pcloud_ptr); - ec.extract(obstacles_indices); + + // EUCLIDEAN CLUSTERING (DEPRECATED) + // // pcl::EuclideanClusterExtraction ec; + // pcl::EuclideanClusterExtraction ec; + // ec.setClusterTolerance(m_clustering_distance); + // ec.setMinClusterSize(m_obstacle_size_min); + // ec.setMaxClusterSize(m_obstacle_size_max); + // ec.setSearchMethod(tree); + // ec.setInputCloud(pcloud_ptr); + // ec.extract(obstacles_indices); + + // CONDITIONAL (WITH HSV-BASED CONDITION) EUCLIDEAN CLUSTERING + pcl::ConditionalEuclideanClustering cec; + cec.setClusterTolerance(m_clustering_distance); + cec.setMinClusterSize(m_obstacle_size_min); + cec.setMaxClusterSize(m_obstacle_size_max); + cec.setSearchMethod(tree); + cec.setInputCloud(pcloud_ptr); + cec.setConditionFunction(&hsv_diff_condition); + cec.segment(obstacles_indices); //color segmentation using the color label cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index cabf7d1b..b90df3ad 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -83,6 +83,9 @@ class BBoxProjectPCloud : public rclcpp::Node{ const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg); + // The HSV-similarity condition for Conditional Euclidean Clustering + bool hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist); + int m_obstacle_id; // for cluster extraction @@ -107,6 +110,7 @@ class BBoxProjectPCloud : public rclcpp::Node{ double m_clustering_distance_weight; std::vector m_clustering_color_weights; + double m_clustering_hue_thres; double m_cluster_contour_distance_weight; std::vector m_cluster_contour_color_weights; std::vector m_contour_detection_color_weights; From 812df903156e77e3c8988cb36ed3d2e3919793e8 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 22 Nov 2024 00:31:48 -0500 Subject: [PATCH 12/35] Small commit, converting contour points to original image coordinates from the bounding box ones --- .../all_seaing_perception/bbox_project_pcloud.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index b66ddd53..85f897de 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -248,7 +248,7 @@ void BBoxProjectPCloud::bb_pcl_project( 
cec.setConditionFunction(&hsv_diff_condition); cec.segment(obstacles_indices); - //color segmentation using the color label + // color segmentation using the color label cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); cv::Size img_sz; cv::Point bbox_offset; @@ -261,8 +261,12 @@ void BBoxProjectPCloud::bb_pcl_project( cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); std::vector> contours; cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); - //TODO: Now convert the contour points to fit the original image (using bbox_offset) to be able to use it with the LiDAR (projected) points - + // Convert the contour points to fit the original image (using image_sz and bbox_offset -> just bbox_offset+pt_coords) to be able to use it with the LiDAR (projected) points + for(int ctr = 0; ctr < contours.size(); ctr++){ + for(int pt=0; pt < contours[ctr].size(); pt++){ + contours[ctr][pt]+=bbox_offset; + } + } //TODO: GO THROUGH ALL THE CLUSTER/CONTOUR PAIRS AND FIND THE BEST ONE BASED ON THE OPTIMALITY METRIC WITH THE WEIGHTS for (std::vector contour : contours){ for (pcl::PointIndices obstacle : obstacles_indices) { From 0c5b21c7bc38f79ea4013223833a23b4b8911713 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 24 Nov 2024 19:15:03 -0500 Subject: [PATCH 13/35] Finished the code that selects the optimal cluster-contour pair, need to publish it now --- .../contour_matching_color_ranges.yaml | 6 + .../bbox_project_pcloud.cpp | 148 ++++++++++++++++-- .../bbox_project_pcloud.hpp | 7 +- 3 files changed, 143 insertions(+), 18 deletions(-) create mode 100644 all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml diff --git a/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml b/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml new file mode 100644 index 00000000..ce67fb52 --- /dev/null +++ b/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml @@ -0,0 +1,6 @@ +orange: [10, 31, 125, 255, 60, 255] +red: [0, 10, 175, 255, 70, 255] +red2: [170, 179, 175, 255, 70, 255] +green: [59, 82, 142, 255, 30, 255] +black: [0, 0, 0, 0, 0, 50] +white: [0, 0, 0, 10, 200, 255] \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 85f897de..a031ef2d 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -30,12 +30,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ this->declare_parameter('color_ranges_file', ''); this->declare_parameter('color_label_mappings_file', ''); - color_ranges_file = this->get_parameter('color_ranges_file').as_string(); color_label_mappings_file = this->get_parameter('color_label_mappings_file').as_string(); // for cluster-contour matching & selection this->declare_parameter('matching_weights_file', ''); matching_weights_file = this->get_parameter('matching_weights_file').as_string(); + this->declare_parameter('contour_matching_color_ranges_file', ''); + contour_matching_color_ranges_file = this->get_parameter('contour_matching_color_ranges_file').as_string(); // Subscriptions m_image_intrinsics_sub = this->create_subscription( @@ -59,7 +60,6 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ if (label_yaml.is_open()) { label_config_yaml = 
YAML::Load(label_yaml); for (YAML::const_iterator it = label_config_yaml.begin(); it != label_config_yaml.end(); ++it) { - // RCLCPP_INFO(this->get_logger(), "label %d and color %s", it->second.as(), it->second.as().c_str()); label_color_map[it->second.as()] = it->first.as(); } } @@ -73,7 +73,6 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ if (ranges_yaml.is_open()) { ranges_config_yaml = YAML::Load(ranges_yaml); for (YAML::const_iterator it = ranges_config_yaml.begin(); it != ranges_config_yaml.end(); ++it) { - // RCLCPP_INFO(this->get_logger(), "label %d and color %s", it->second.as(), it->second.as().c_str()); color_range_map[it->first.as()] = it->second.as(); } } @@ -99,6 +98,18 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", matching_weights_file.c_str()); } + + // get contour matching color ranges from yaml + std::ifstream contour_ranges_yaml(contour_matching_color_ranges_file); + if (ranges_yaml.is_open()) { + contour_ranges_config_yaml = YAML::Load(contour_ranges_yaml); + for (YAML::const_iterator it = contour_ranges_config_yaml.begin(); it != contour_ranges_config_yaml.end(); ++it) { + contour_matching_color_range_map[it->first.as()] = it->second.as(); + } + } + else { + RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", contour_matching_color_ranges_file.c_str()); + } } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -106,8 +117,14 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m m_cam_model.fromCameraInfo(info_msg); } -bool hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist){ - return m_clustering_color_weights[0]*p1.h^2+m_clustering_color_weights[1]*p1.s^2+m_clustering_color_weights[2]*p1.v^2 weights, int[6] color_range, cv::Vec3b pt_color){ + // Average of squared distances from each point in the range + double h_min=color_range[0], h_max=color_range[1], s_min=color_range[2], s_max=color_range[3], v_min=color_range[4], v_max=color_range[5]; + return weights[0]*(pow(h_max-pt_color[0],3)-pow(h_min-pt_color[0],3))/(h_max-h_min) + weights[1]*(pow(s_max-pt_color[1],3)-pow(s_min-pt_color[1],3))/(s_max-s_min) + weights[2]*(pow(v_max-pt_color[2],3)-pow(v_min-pt_color[2],3))/(v_max-v_min); } void BBoxProjectPCloud::bb_pcl_project( @@ -149,7 +166,8 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); return; } - cv_bridge::CvImagePtr cv_hsv_ptr = cv_bridge::cvtColor(cv_ptr.toCvShare(), cv::COLOR_RGB2HSV); + cv::Mat cv_hsv; + cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); std::vector>::Ptr> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); @@ -167,12 +185,12 @@ void BBoxProjectPCloud::bb_pcl_project( double bbox_margin = this->get_parameter("bbox_object_margin").as_double(); // Check if point is in bbox if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ - cv::Vec3b hsv = cv_hsv_ptr->image.at(xy_rect); - // cv::Vec3b rgb = cv_ptr->image.at(xy_rect); + cv::Vec3b hsv = cv_hsv.at(xy_rect); + // cv::Vec3b bgr = cv_ptr->image.at(xy_rect); obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, hsv[0], hsv[1], hsv[2])); - // 
obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, rgb[0], rgb[1], rgb[2])); + // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, bgr[2], bgr[1], bgr[0])); RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); - // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, rgb[0], rgb[1], rgb[2]); + // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); } } } @@ -226,7 +244,7 @@ void BBoxProjectPCloud::bb_pcl_project( // pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); if (!pcloud_ptr->points.empty()) tree->setInputCloud(pcloud_ptr); - std::vector obstacles_indices; + std::vector clusters_indices; // EUCLIDEAN CLUSTERING (DEPRECATED) // // pcl::EuclideanClusterExtraction ec; @@ -236,9 +254,10 @@ void BBoxProjectPCloud::bb_pcl_project( // ec.setMaxClusterSize(m_obstacle_size_max); // ec.setSearchMethod(tree); // ec.setInputCloud(pcloud_ptr); - // ec.extract(obstacles_indices); + // ec.extract(clusters_indices); // CONDITIONAL (WITH HSV-BASED CONDITION) EUCLIDEAN CLUSTERING + // TODO: USE AN ALGORITHM THAT MAKES SURE THE CONTOUR IS CONTINUOUS (WITHOUT NON-SELECTED POINTS INSIDE IT, AS IT'S PROBABLY THE CASE NOW) pcl::ConditionalEuclideanClustering cec; cec.setClusterTolerance(m_clustering_distance); cec.setMinClusterSize(m_obstacle_size_min); @@ -246,10 +265,10 @@ void BBoxProjectPCloud::bb_pcl_project( cec.setSearchMethod(tree); cec.setInputCloud(pcloud_ptr); cec.setConditionFunction(&hsv_diff_condition); - cec.segment(obstacles_indices); + cec.segment(clusters_indices); // color segmentation using the color label - cv::Mat hsv_img(cv_hsv_ptr->image, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); + cv::Mat hsv_img(cv_hsv, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); cv::Size img_sz; cv::Point bbox_offset; hsv_img.locateROI(img_sz, bbox_offset); @@ -267,12 +286,107 @@ void BBoxProjectPCloud::bb_pcl_project( contours[ctr][pt]+=bbox_offset; } } - //TODO: GO THROUGH ALL THE CLUSTER/CONTOUR PAIRS AND FIND THE BEST ONE BASED ON THE OPTIMALITY METRIC WITH THE WEIGHTS - for (std::vector contour : contours){ - for (pcl::PointIndices obstacle : obstacles_indices) { + // Convert both the pcloud and the contours to a vector of pair and compute sum and sum of squares (can be used to compute all the needed metrics) + std::vector>> contours_pts; + std::vector, std::pair>> contours_qts; + for (std::vector contour : contours){// point.x, point.y the coords + std::vector> contour_pts; + std::pair, std::pair> contour_qts; + for(cv::Point image_pt : contour){ + cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); + cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); + contour_pts.push_back(std::make_pair(image_pt_xy, image_pt_hsv)); + //store sums + contour_qts.first.first.x+=image_pt_xy.x; + contour_qts.first.first.y+=image_pt_xy.y; + contour_qts.first.second[0]+=image_pt_hsv[0]; + contour_qts.first.second[1]+=image_pt_hsv[1]; + contour_qts.first.second[2]+=image_pt_hsv[2]; + //store sum of squares + contour_qts.second.first.x+=image_pt_xy.x*image_pt_xy.x; + contour_qts.second.first.y+=image_pt_xy.y*image_pt_xy.y; + contour_qts.second.second[0]+=image_pt_hsv[0]*image_pt_hsv[0]; + 
contour_qts.second.second[1]+=image_pt_hsv[1]*image_pt_hsv[1]; + contour_qts.second.second[2]+=image_pt_hsv[2]*image_pt_hsv[2]; + } + contours_pts.push_back(contour_pts); + contours_qts.push_back(contour_qts); + } + std::vector>> clusters_pts; + std::vector, std::pair>> clusters_qts; + for (pcl::PointIndices cluster : clusters_indices){// vector is cluster.indices + std::vector> cluster_pts; + std::pair, std::pair> cluster_qts; + for(index_t ind : cluster.indices){ + cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(cloud_pt.y, cloud_pt.z, -cloud_pt.x)); + cluster_pts.push_back(std::make_pair(cloud_pt_xy, cloud_pt_hsv)); + //store sums + cluster_qts.first.first.x+=cloud_pt_xy.x; + cluster_pts.first.first.y+=cloud_pt_xy.y; + cluster_pts.first.second[0]+=cloud_pt_hsv[0]; + cluster_pts.first.second[1]+=cloud_pt_hsv[1]; + cluster_pts.first.second[2]+=cloud_pt_hsv[2]; + //store sum of squares + cluster_pts.second.first.x+=cloud_pt_xy.x*cloud_pt_xy.x; + cluster_pts.second.first.y+=cloud_pt_xy.y*cloud_pt_xy.y; + cluster_pts.second.second[0]+=cloud_pt_hsv[0]*cloud_pt_hsv[0]; + cluster_pts.second.second[1]+=cloud_pt_hsv[1]*cloud_pt_hsv[1]; + cluster_pts.second.second[2]+=cloud_pt_hsv[2]*cloud_pt_hsv[2]; + } + clusters_pts.push_back(cluster_pts); + clusters_qts.push_back(cluster_qts); + } + // GO THROUGH ALL THE CLUSTER/CONTOUR PAIRS AND FIND THE BEST ONE BASED ON THE OPTIMALITY METRIC WITH THE WEIGHTS + double min_pair_cost = -1; + int opt_contour_id = -1, opt_cluster_id = -1; + for (int contour = 0; contour < contours_pts.size(); contour++){ + double contour_size = contours_pts[contour].size(); + std::pair, std::pair> contour_qts = contours_qts[contour]; + + double contour_cost = 0; + for (std::pair contour_pt : contours_pts[contour]){ + contour_cost += color_range_penalty(m_contour_detection_color_weights, label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]], contour_pt.second)/contour_size; + } + for (int cluster = 0; cluster < clusters_pts.size(); cluster++){ + double cluster_size = clusters_pts[cluster].size(); + std::pair, std::pair> cluster_qts = clusters_qts[cluster]; + + double contour_cluster_dist_rms = sqrt((contour_qts.second.first.x+cluster_qts.second.first.x//sum(x_i^2+x_j^2) + -2*contour_qts.first.first.x*cluster_qts.first.first.x//-2*sum(x_i*x_j) + contour_qts.second.first.y+cluster_qts.second.first.y//sum(y_i^2+y_j^2) + -2*contour_qts.first.first.x*cluster_qts.first.first.x)//-2*sum(y_i*y_j) + /(cluster_size*contour_size);//divide by cluster_size*contour_size + + cv::Vec3b avg_contour_col = (1/contour_size)*contour_qts.first.second; + + double cluster_dh_rms = sqrt((cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+avg_contour_col[0]*avg_contour_col[0])/cluster_size);//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+m_h^2)/cluster_size + double cluster_ds_rms = sqrt((cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+avg_contour_col[1]*avg_contour_col[1])/cluster_size);//same for s + double cluster_dv_rms = sqrt((cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+avg_contour_col[2]*avg_contour_col[2])/cluster_size);//same for v + + double pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_rms*contour_cluster_dist_rms + + m_cluster_contour_color_weights[0]*cluster_dh_rms*cluster_dh_rms 
+ + m_cluster_contour_color_weights[1]*cluster_ds_rms*cluster_ds_rms + + m_cluster_contour_color_weights[2]*cluster_dv_rms*cluster_dv_rms + - m_cluster_contour_size_weight*cluster_size*contour_size + + contour_cost; + if (min_pair_cost == -1 || pair_cost < min_pair_cost){ + min_pair_cost = pair_cost; + opt_contour_id = contour; + opt_cluster_id = cluster; + } } } + // TODO: Process & publsh selected contour + for (cv::Point image_pt : contours[opt_contour_id]){ + + } + + // TODO: Process & publish selected cluster + for (index_t ind : clusters_indices[opt_cluster_id].indices){ + pcl::PointXYZHSV pt = pcloud_ptr->points[ind]; + } } } diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index b90df3ad..421b282e 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -5,7 +5,7 @@ #include #include #include -#include +#include #include "rclcpp/rclcpp.hpp" @@ -86,6 +86,9 @@ class BBoxProjectPCloud : public rclcpp::Node{ // The HSV-similarity condition for Conditional Euclidean Clustering bool hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist); + // The penalty function that compares an HSV color value to an HSV color range + double color_range_penalty(vector weights, int[6] color_range, cv::Vec3b pt_color); + int m_obstacle_id; // for cluster extraction @@ -105,6 +108,7 @@ class BBoxProjectPCloud : public rclcpp::Node{ // for cluster-contour matching/selection std::string matching_weights_file; + std::string contour_matching_color_ranges_file; YAML::Node matching_weights_yaml; @@ -115,6 +119,7 @@ class BBoxProjectPCloud : public rclcpp::Node{ std::vector m_cluster_contour_color_weights; std::vector m_contour_detection_color_weights; double m_cluster_contour_size_weight; + std::map contour_matching_color_range_map; public: BBoxProjectPCloud(); From f5665255dc5f7e13c6c8e8eb0a7cca453f0c23c6 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Wed, 27 Nov 2024 02:04:11 -0500 Subject: [PATCH 14/35] Code for publishing the results of the refined object clouds & visualizing them in RViz (not tested or built yet) --- .../msg/LabeledObjectPointCloud.msg | 3 +- .../bbox_project_pcloud.cpp | 51 ++++++++++++++++--- .../bbox_project_pcloud.hpp | 4 ++ 3 files changed, 51 insertions(+), 7 deletions(-) diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg index 53ef7f6a..2caae308 100644 --- a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -1,2 +1,3 @@ int8 label -sensor_msgs/PointCloud2 cloud \ No newline at end of file +sensor_msgs/PointCloud2 cloud +sensor_msgs/Image contour \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index a031ef2d..9992ea43 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -54,6 +54,9 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Publishers m_object_pcl_pub = this->create_publisher("labeled_object_point_clouds", 5); m_object_pcl_viz_pub = this->create_publisher("object_point_clouds_viz", 5); + 
m_refined_object_pcl_contour_pub = this->create_publisher("refined_object_point_clouds_contours", 5); + m_refined_object_pcl_viz_pub = this->create_publisher("refined_object_point_clouds_viz"); + m_refined_object_contour_viz_pub = this->create_publisher("refined_object_contours_viz"); // get color label mappings from yaml std::ifstream label_yaml(color_label_mappings_file); @@ -146,7 +149,7 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_INFO(this->get_logger(), "FRAME_ID BEFORE SELECTION, AFTER TRANSFORM TO CAMERA FRAME: %s", in_cloud_tf_ptr->header.frame_id); auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); - std::vector> obj_cloud_vec; + std::vector> obj_cloud_vec; int max_len = 0; // Just use the same pcloud to image projection, but check if it's within some binding box and assign it to that detection int obj = 0; @@ -210,7 +213,7 @@ void BBoxProjectPCloud::bb_pcl_project( //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); RCLCPP_INFO(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); - RCLCPP_INFO(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); + // RCLCPP_INFO(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS //(I.E. HEIGHT & WIDTH ARE CORRECT, OTHERWISE SWAP THEIR ORDER BOTH IN THE RESIZE() AND THE AT() METHODS) @@ -227,6 +230,10 @@ void BBoxProjectPCloud::bb_pcl_project( m_object_pcl_viz_pub->publish(obj_pcls_msg); // REFINE OBJECT POINT CLOUDS + auto refined_objects_pub = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); + std::vector, cv::Mat>> refined_cloud_contour_vec; + int max_refined_len = 0; + for(auto bbox_pcloud_pair : bbox_pcloud_objects){ all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; @@ -378,16 +385,48 @@ void BBoxProjectPCloud::bb_pcl_project( } } } - // TODO: Process & publsh selected contour - for (cv::Point image_pt : contours[opt_contour_id]){ + auto refined_pcl_contours = all_seaing_interfaces::msg::LabeledObjectPointCloud(); + refined_pcl_contours.label = bbox.label; + pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud); + refined_cloud_ptr->header = pcloud_ptr->header; + cv::Mat refined_obj_contour_mat.zeros(image_sz, cv::CV_8UC1); + for (cv::Point image_pt : contours[opt_contour_id]){ + refined_obj_contour_mat.at(image_pt) = 1; } - - // TODO: Process & publish selected cluster for (index_t ind : clusters_indices[opt_cluster_id].indices){ pcl::PointXYZHSV pt = pcloud_ptr->points[ind]; + refined_cloud_ptr->push_back(pt); } + pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_contours.cloud); + cv_bridge::CvImagePtr refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, refined_obj_contour_mat)); + refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); + refined_objects_pub.objects.push_back(refined_pcl_contours); + refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,refined_obj_contour_mat)); + max_refined_len = std::max(max_refined_len, (int)obj_cloud_ptr->size()); + } + RCLCPP_INFO(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS"); + 
m_refined_object_pcl_contour_pub->publish(refined_pcl_contours); + pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud); + all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; + //convert vector of PointCloud to a single PointCloud with channels + all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); + cv::Mat all_obj_refined_contours.zeros(cv_ptr->image.size, cv::CV_8UC1); + try{ + for(int i = 0; iat(j,i) = obj_cloud_vec[i][j]; + } + all_obj_refined_contours += refined_cloud_contour_vec[i].second; + } + }catch(std::exception &ex){ + RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); } + auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); + pcl::toROSMsg(*all_obj_refined_pcls_ptr, obj_refined_pcls_msg); + m_refined_object_pcl_viz_pub->publish(obj_refined_pcls_msg); + cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, all_obj_refined_contours)); + m_refined_object_contour_viz_pub->publish(*refined_obj_contour_ptr->toImageMsg()); } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 421b282e..07d9a4a7 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -6,6 +6,7 @@ #include #include #include +#include #include "rclcpp/rclcpp.hpp" @@ -45,6 +46,9 @@ class BBoxProjectPCloud : public rclcpp::Node{ // Publishers and subscribers rclcpp::Publisher::SharedPtr m_object_pcl_pub; rclcpp::Publisher::SharedPtr m_object_pcl_viz_pub; + rclcpp::Publisher::SharedPtr m_refined_object_pcl_contour_pub; + rclcpp::Publisher::SharedPtr m_refined_object_pcl_viz_pub; + rclcpp::Publisher::SharedPtr m_refined_object_contour_viz_pub; rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; message_filters::Subscriber m_image_sub; message_filters::Subscriber m_cloud_sub; From 9e1037bf3792b6a319d43bedd30dcbd7500f6598 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 1 Dec 2024 19:07:05 -0500 Subject: [PATCH 15/35] Fixed compile-time errors, colcon builds, to be tested --- all_seaing_bringup/launch/sim.launch.py | 26 +--- all_seaing_perception/CMakeLists.txt | 1 + .../bbox_project_pcloud.cpp | 133 +++++++++--------- .../bbox_project_pcloud.hpp | 24 ++-- .../launch/bbox_project_pcloud_test.launch.py | 6 +- .../launch/yolov8_detection.launch.py | 52 ------- .../launch/yolov8_detection_sim.launch.py | 52 ------- 7 files changed, 94 insertions(+), 200 deletions(-) delete mode 100644 all_seaing_perception/launch/yolov8_detection.launch.py delete mode 100644 all_seaing_perception/launch/yolov8_detection_sim.launch.py diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 0497c403..948902cf 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -28,6 +28,9 @@ def generate_launch_description(): matching_weights = os.path.join( bringup_prefix, "config", "perception", "filtering_weights.yaml" ) + contour_matching_color_ranges = os.path.join( + bringup_prefix, "config", "perception", "contour_matching_color_ranges.yaml" + ) subprocess.run(["cp", "-r", os.path.join(bringup_prefix, "tile"), "/tmp"]) launch_rviz = 
LaunchConfiguration("launch_rviz") @@ -167,7 +170,8 @@ def generate_launch_description(): {"obstacle_seg_thresh": 10.0}, {"obstacle_drop_thresh": 1.0}, {"polygon_area_thresh": 100000.0}, - {"matching_weights_file": matching_weights} + {"matching_weights_file": matching_weights}, + {"contour_matching_color_ranges_file": contour_matching_color_ranges} ] ) @@ -245,23 +249,6 @@ def generate_launch_description(): }.items(), ) - yolov8_prefix = get_package_share_directory("yolov8_bringup") - model_share_path = get_package_share_directory("all_seaing_perception") - - yolov8_ld = LaunchDescription( - [ - IncludeLaunchDescription( - PythonLaunchDescriptionSource( - [yolov8_prefix, "/launch/yolov8.launch.py"] - ), - launch_arguments={ - "model": model_share_path + "/models/yolov8_roboboat_model.pt", - "input_image_topic": "/wamv/sensors/cameras/front_left_camera_sensor/image_raw", - }.items(), - ) - ] - ) - return LaunchDescription( [ launch_rviz_launch_arg, @@ -273,7 +260,7 @@ def generate_launch_description(): color_segmentation_node, point_cloud_filter_node, obstacle_detector_node, - bbox_project_pcloud_node, + # bbox_project_pcloud_node, rviz_node, control_mux, controller_server, @@ -282,7 +269,6 @@ def generate_launch_description(): rviz_waypoint_sender, keyboard_ld, sim_ld, - # yolov8_ld, perception_eval_node, ] ) diff --git a/all_seaing_perception/CMakeLists.txt b/all_seaing_perception/CMakeLists.txt index 4007524a..be2fbc7b 100644 --- a/all_seaing_perception/CMakeLists.txt +++ b/all_seaing_perception/CMakeLists.txt @@ -37,6 +37,7 @@ ament_auto_add_executable( ) target_link_libraries(obstacle_bbox_visualizer yaml-cpp) +target_link_libraries(bbox_project_pcloud yaml-cpp) install(TARGETS obstacle_detector diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 9992ea43..0c4ec7f1 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -27,16 +27,16 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_polygon_area_thresh = this->get_parameter("polygon_area_thresh").as_double(); // for color segmentation - this->declare_parameter('color_ranges_file', ''); - this->declare_parameter('color_label_mappings_file', ''); + this->declare_parameter("color_ranges_file", ""); + this->declare_parameter("color_label_mappings_file", ""); - color_label_mappings_file = this->get_parameter('color_label_mappings_file').as_string(); + color_label_mappings_file = this->get_parameter("color_label_mappings_file").as_string(); // for cluster-contour matching & selection - this->declare_parameter('matching_weights_file', ''); - matching_weights_file = this->get_parameter('matching_weights_file').as_string(); - this->declare_parameter('contour_matching_color_ranges_file', ''); - contour_matching_color_ranges_file = this->get_parameter('contour_matching_color_ranges_file').as_string(); + this->declare_parameter("matching_weights_file", ""); + matching_weights_file = this->get_parameter("matching_weights_file").as_string(); + this->declare_parameter("contour_matching_color_ranges_file", ""); + contour_matching_color_ranges_file = this->get_parameter("contour_matching_color_ranges_file").as_string(); // Subscriptions m_image_intrinsics_sub = this->create_subscription( @@ -55,8 +55,8 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_object_pcl_pub = 
this->create_publisher("labeled_object_point_clouds", 5); m_object_pcl_viz_pub = this->create_publisher("object_point_clouds_viz", 5); m_refined_object_pcl_contour_pub = this->create_publisher("refined_object_point_clouds_contours", 5); - m_refined_object_pcl_viz_pub = this->create_publisher("refined_object_point_clouds_viz"); - m_refined_object_contour_viz_pub = this->create_publisher("refined_object_contours_viz"); + m_refined_object_pcl_viz_pub = this->create_publisher("refined_object_point_clouds_viz", 5); + m_refined_object_contour_viz_pub = this->create_publisher("refined_object_contours_viz", 5); // get color label mappings from yaml std::ifstream label_yaml(color_label_mappings_file); @@ -76,43 +76,44 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ if (ranges_yaml.is_open()) { ranges_config_yaml = YAML::Load(ranges_yaml); for (YAML::const_iterator it = ranges_config_yaml.begin(); it != ranges_config_yaml.end(); ++it) { - color_range_map[it->first.as()] = it->second.as(); + color_range_map[it->first.as()] = it->second.as>(); } } else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", color_ranges_file.c_str()); } - ranges_yaml.close(); // get cluster-contour matching & selection parameters from yaml std::ifstream matching_yaml(matching_weights_file); if (matching_yaml.is_open()) { - matching_weights_yaml = YAML::Load(matching_yaml); - - m_clustering_distance_weight = matching_weights_yaml["clustering_distance_weight"]; - m_clustering_color_weights = matching_weights_yaml["clustering_color_weights"]; - m_clustering_color_thres = matching_weights_yaml["clustering_color_thres"]; - m_cluster_contour_distance_weight = matching_weights_yaml["cluster_contour_distance_weight"]; - m_cluster_contour_color_weights = matching_weights_yaml["cluster_contour_color_weights"]; - m_contour_detection_color_weights = matching_weights_yaml["contour_detection_color_weights"]; - m_cluster_contour_size_weight = matching_weights_yaml["cluster_contour_size_weight"]; + matching_weights_config_yaml = YAML::Load(matching_yaml); + + m_clustering_distance_weight = matching_weights_config_yaml["clustering_distance_weight"].as(); + m_clustering_color_weights = matching_weights_config_yaml["clustering_color_weights"].as>(); + m_clustering_color_thres = matching_weights_config_yaml["clustering_color_thres"].as(); + m_cluster_contour_distance_weight = matching_weights_config_yaml["cluster_contour_distance_weight"].as(); + m_cluster_contour_color_weights = matching_weights_config_yaml["cluster_contour_color_weights"].as>(); + m_contour_detection_color_weights = matching_weights_config_yaml["contour_detection_color_weights"].as>(); + m_cluster_contour_size_weight = matching_weights_config_yaml["cluster_contour_size_weight"].as(); } else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", matching_weights_file.c_str()); } + matching_yaml.close(); // get contour matching color ranges from yaml std::ifstream contour_ranges_yaml(contour_matching_color_ranges_file); - if (ranges_yaml.is_open()) { - contour_ranges_config_yaml = YAML::Load(contour_ranges_yaml); - for (YAML::const_iterator it = contour_ranges_config_yaml.begin(); it != contour_ranges_config_yaml.end(); ++it) { - contour_matching_color_range_map[it->first.as()] = it->second.as(); + if (contour_ranges_yaml.is_open()) { + contour_matching_ranges_config_yaml = YAML::Load(contour_ranges_yaml); + for (YAML::const_iterator it = contour_matching_ranges_config_yaml.begin(); it != 
contour_matching_ranges_config_yaml.end(); ++it) { + contour_matching_color_range_map[it->first.as()] = it->second.as>(); } } else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", contour_matching_color_ranges_file.c_str()); } + contour_ranges_yaml.close(); } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { @@ -120,11 +121,11 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m m_cam_model.fromCameraInfo(info_msg); } -bool BBoxProjectPCloud::hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist){ - return m_clustering_color_weights[0]*(p1.h-p2.h)*(p1.h-p2.h)+m_clustering_color_weights[1]*(p1.s-p2.s)*(p1.s-p2.s)+m_clustering_color_weights[2]*(p1.v-p2.v)*(p1.v-p2.v) weights, double thres, const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist){ + return weights[0]*(p1.h-p2.h)*(p1.h-p2.h)+weights[1]*(p1.s-p2.s)*(p1.s-p2.s)+weights[2]*(p1.v-p2.v)*(p1.v-p2.v) < thres*thres; } -double BBoxProjectPCloud::color_range_penalty(vector weights, int[6] color_range, cv::Vec3b pt_color){ +double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color){ // Average of squared distances from each point in the range double h_min=color_range[0], h_max=color_range[1], s_min=color_range[2], s_max=color_range[3], v_min=color_range[4], v_max=color_range[5]; return weights[0]*(pow(h_max-pt_color[0],3)-pow(h_min-pt_color[0],3))/(h_max-h_min) + weights[1]*(pow(s_max-pt_color[1],3)-pow(s_min-pt_color[1],3))/(s_max-s_min) + weights[2]*(pow(v_max-pt_color[2],3)-pow(v_min-pt_color[2],3))/(v_max-v_min); @@ -171,13 +172,13 @@ void BBoxProjectPCloud::bb_pcl_project( } cv::Mat cv_hsv; cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); - std::vector>::Ptr> bbox_pcloud_objects; + std::vector::Ptr>> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); labeled_pcl.label = bbox.label; obj_cloud_ptr->header = in_cloud_tf_ptr->header; - RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%lf,%lf), (%lf, %lf)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y, obj); + RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%d,%d), (%d, %d)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y); for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { // Project 3D point onto the image plane using the intrinsic matrix. // Gazebo has a different coordinate system, so the y, z, and x coordinates are modified. 
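// Clarifying note on the color_range_penalty helper updated earlier in this patch (annotation, not a diff line):
// per channel, for a range [a, b] and a point value t, the mean squared distance over the range is
//   (1/(b-a)) * integral_{a..b} (c - t)^2 dc = ((b - t)^3 - (a - t)^3) / (3 * (b - a)),
// i.e. the "average of squared distances" the comment describes, up to the constant factor 1/3 that the
// implementation drops. Dropping it rescales every penalty equally, so the minimizing cluster/contour
// pair is unchanged; only this term's weight relative to the distance and size terms shifts. Degenerate
// ranges with max == min (e.g. the black and white hue/saturation spans in
// contour_matching_color_ranges.yaml) would divide by zero here.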
@@ -192,8 +193,8 @@ void BBoxProjectPCloud::bb_pcl_project( // cv::Vec3b bgr = cv_ptr->image.at(xy_rect); obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, hsv[0], hsv[1], hsv[2])); // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, bgr[2], bgr[1], bgr[0])); - RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); - // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%lf, %lf, %lf)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); + RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); + // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); } } } @@ -248,7 +249,7 @@ void BBoxProjectPCloud::bb_pcl_project( //extract clusters pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); - // pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); + // pcl::search::KdTree::Ptr tree(new pcl::search::KdTree); if (!pcloud_ptr->points.empty()) tree->setInputCloud(pcloud_ptr); std::vector clusters_indices; @@ -265,13 +266,14 @@ void BBoxProjectPCloud::bb_pcl_project( // CONDITIONAL (WITH HSV-BASED CONDITION) EUCLIDEAN CLUSTERING // TODO: USE AN ALGORITHM THAT MAKES SURE THE CONTOUR IS CONTINUOUS (WITHOUT NON-SELECTED POINTS INSIDE IT, AS IT'S PROBABLY THE CASE NOW) - pcl::ConditionalEuclideanClustering cec; + pcl::ConditionalEuclideanClustering cec; cec.setClusterTolerance(m_clustering_distance); cec.setMinClusterSize(m_obstacle_size_min); cec.setMaxClusterSize(m_obstacle_size_max); cec.setSearchMethod(tree); cec.setInputCloud(pcloud_ptr); - cec.setConditionFunction(&hsv_diff_condition); + std::function cond_func = std::bind(&hsv_diff_condition, m_clustering_color_weights, m_clustering_color_thres, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3); + cec.setConditionFunction(cond_func); cec.segment(clusters_indices); // color segmentation using the color label @@ -279,12 +281,12 @@ void BBoxProjectPCloud::bb_pcl_project( cv::Size img_sz; cv::Point bbox_offset; hsv_img.locateROI(img_sz, bbox_offset); - int lims[6] = label_color_map[bbox.label]=="red"?color_range_map["red2"]:color_range_map[label_color_map[bbox.label]]; + std::vector lims = label_color_map[bbox.label]=="red"?color_range_map["red2"]:color_range_map[label_color_map[bbox.label]]; int h_min=lims[0], h_max=lims[1], s_min=lims[2], s_max=lims[3], v_min=lims[4], v_max=lims[5]; cv::Mat mask; cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); - cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 5)); - cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, 7)); + cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5,5))); + cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7,7))); std::vector> contours; cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); // Convert the contour points to fit the original image (using image_sz and bbox_offset -> just bbox_offset+pt_coords) to be able to use it with the LiDAR (projected) points @@ -324,22 +326,22 @@ void BBoxProjectPCloud::bb_pcl_project( for (pcl::PointIndices cluster : clusters_indices){// vector is cluster.indices std::vector> cluster_pts; std::pair, 
std::pair> cluster_qts; - for(index_t ind : cluster.indices){ + for(pcl::index_t ind : cluster.indices){ cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); - cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(cloud_pt.y, cloud_pt.z, -cloud_pt.x)); + cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); cluster_pts.push_back(std::make_pair(cloud_pt_xy, cloud_pt_hsv)); //store sums cluster_qts.first.first.x+=cloud_pt_xy.x; - cluster_pts.first.first.y+=cloud_pt_xy.y; - cluster_pts.first.second[0]+=cloud_pt_hsv[0]; - cluster_pts.first.second[1]+=cloud_pt_hsv[1]; - cluster_pts.first.second[2]+=cloud_pt_hsv[2]; + cluster_qts.first.first.y+=cloud_pt_xy.y; + cluster_qts.first.second[0]+=cloud_pt_hsv[0]; + cluster_qts.first.second[1]+=cloud_pt_hsv[1]; + cluster_qts.first.second[2]+=cloud_pt_hsv[2]; //store sum of squares - cluster_pts.second.first.x+=cloud_pt_xy.x*cloud_pt_xy.x; - cluster_pts.second.first.y+=cloud_pt_xy.y*cloud_pt_xy.y; - cluster_pts.second.second[0]+=cloud_pt_hsv[0]*cloud_pt_hsv[0]; - cluster_pts.second.second[1]+=cloud_pt_hsv[1]*cloud_pt_hsv[1]; - cluster_pts.second.second[2]+=cloud_pt_hsv[2]*cloud_pt_hsv[2]; + cluster_qts.second.first.x+=cloud_pt_xy.x*cloud_pt_xy.x; + cluster_qts.second.first.y+=cloud_pt_xy.y*cloud_pt_xy.y; + cluster_qts.second.second[0]+=cloud_pt_hsv[0]*cloud_pt_hsv[0]; + cluster_qts.second.second[1]+=cloud_pt_hsv[1]*cloud_pt_hsv[1]; + cluster_qts.second.second[2]+=cloud_pt_hsv[2]*cloud_pt_hsv[2]; } clusters_pts.push_back(cluster_pts); clusters_qts.push_back(cluster_qts); @@ -360,11 +362,12 @@ void BBoxProjectPCloud::bb_pcl_project( double cluster_size = clusters_pts[cluster].size(); std::pair, std::pair> cluster_qts = clusters_qts[cluster]; - double contour_cluster_dist_rms = sqrt((contour_qts.second.first.x+cluster_qts.second.first.x//sum(x_i^2+x_j^2) - -2*contour_qts.first.first.x*cluster_qts.first.first.x//-2*sum(x_i*x_j) - contour_qts.second.first.y+cluster_qts.second.first.y//sum(y_i^2+y_j^2) - -2*contour_qts.first.first.x*cluster_qts.first.first.x)//-2*sum(y_i*y_j) - /(cluster_size*contour_size);//divide by cluster_size*contour_size + //(sum(x_i^2+x_j^2) -2*sum(x_i*x_j) +sum(y_i^2+y_j^2) -2*sum(y_i*y_j)) /(cluster_size*contour_size) + double contour_cluster_dist_rms = sqrt((contour_qts.second.first.x+cluster_qts.second.first.x\ + - 2*contour_qts.first.first.x*cluster_qts.first.first.x\ + + contour_qts.second.first.y+cluster_qts.second.first.y\ + - 2*contour_qts.first.first.x*cluster_qts.first.first.x)\ + /(cluster_size*contour_size)); cv::Vec3b avg_contour_col = (1/contour_size)*contour_qts.first.second; @@ -372,11 +375,11 @@ void BBoxProjectPCloud::bb_pcl_project( double cluster_ds_rms = sqrt((cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+avg_contour_col[1]*avg_contour_col[1])/cluster_size);//same for s double cluster_dv_rms = sqrt((cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+avg_contour_col[2]*avg_contour_col[2])/cluster_size);//same for v - double pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_rms*contour_cluster_dist_rms - + m_cluster_contour_color_weights[0]*cluster_dh_rms*cluster_dh_rms - + m_cluster_contour_color_weights[1]*cluster_ds_rms*cluster_ds_rms - + m_cluster_contour_color_weights[2]*cluster_dv_rms*cluster_dv_rms - - m_cluster_contour_size_weight*cluster_size*contour_size + 
double pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_rms*contour_cluster_dist_rms\ + + m_cluster_contour_color_weights[0]*cluster_dh_rms*cluster_dh_rms\ + + m_cluster_contour_color_weights[1]*cluster_ds_rms*cluster_ds_rms\ + + m_cluster_contour_color_weights[2]*cluster_dv_rms*cluster_dv_rms\ + - m_cluster_contour_size_weight*cluster_size*contour_size\ + contour_cost; if (min_pair_cost == -1 || pair_cost < min_pair_cost){ min_pair_cost = pair_cost; @@ -390,11 +393,11 @@ void BBoxProjectPCloud::bb_pcl_project( refined_pcl_contours.label = bbox.label; pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud); refined_cloud_ptr->header = pcloud_ptr->header; - cv::Mat refined_obj_contour_mat.zeros(image_sz, cv::CV_8UC1); + cv::Mat refined_obj_contour_mat = cv::Mat::zeros(img_sz, CV_8UC1); for (cv::Point image_pt : contours[opt_contour_id]){ - refined_obj_contour_mat.at(image_pt) = 1; + refined_obj_contour_mat.at(image_pt) = 1; } - for (index_t ind : clusters_indices[opt_cluster_id].indices){ + for (pcl::index_t ind : clusters_indices[opt_cluster_id].indices){ pcl::PointXYZHSV pt = pcloud_ptr->points[ind]; refined_cloud_ptr->push_back(pt); } @@ -403,15 +406,15 @@ void BBoxProjectPCloud::bb_pcl_project( refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); refined_objects_pub.objects.push_back(refined_pcl_contours); refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,refined_obj_contour_mat)); - max_refined_len = std::max(max_refined_len, (int)obj_cloud_ptr->size()); + max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); } RCLCPP_INFO(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS"); - m_refined_object_pcl_contour_pub->publish(refined_pcl_contours); + m_refined_object_pcl_contour_pub->publish(refined_objects_pub); pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud); all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); - cv::Mat all_obj_refined_contours.zeros(cv_ptr->image.size, cv::CV_8UC1); + cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC1); try{ for(int i = 0; ipublish(obj_refined_pcls_msg); cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, all_obj_refined_contours)); - m_refined_object_contour_viz_pub->publish(*refined_obj_contour_ptr->toImageMsg()); + m_refined_object_contour_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 07d9a4a7..9879be69 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -6,7 +6,10 @@ #include #include #include -#include + +#include +#include +#include "yaml-cpp/yaml.h" #include "rclcpp/rclcpp.hpp" @@ -23,8 +26,10 @@ #include "sensor_msgs/msg/image.hpp" #include "sensor_msgs/msg/point_cloud2.hpp" -#include "pcl/point_cloud.h" -#include "pcl/point_types.h" +#include +#include +#include +#include #include "pcl_conversions/pcl_conversions.h" #include 
"cv_bridge/cv_bridge.h" @@ -39,7 +44,6 @@ #include #include -#include "yaml-cpp/yaml.h" class BBoxProjectPCloud : public rclcpp::Node{ private: @@ -88,10 +92,10 @@ class BBoxProjectPCloud : public rclcpp::Node{ const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg); // The HSV-similarity condition for Conditional Euclidean Clustering - bool hsv_diff_condition(const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist); + // bool hsv_diff_condition(std::vector weights, double thres, const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist); // The penalty function that compares an HSV color value to an HSV color range - double color_range_penalty(vector weights, int[6] color_range, cv::Vec3b pt_color); + // double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color); int m_obstacle_id; @@ -108,22 +112,22 @@ class BBoxProjectPCloud : public rclcpp::Node{ std::string color_label_mappings_file; YAML::Node label_config_yaml, ranges_config_yaml; std::map label_color_map; - std::map color_range_map; + std::map> color_range_map; // for cluster-contour matching/selection std::string matching_weights_file; std::string contour_matching_color_ranges_file; - YAML::Node matching_weights_yaml; + YAML::Node matching_weights_config_yaml, contour_matching_ranges_config_yaml; double m_clustering_distance_weight; std::vector m_clustering_color_weights; - double m_clustering_hue_thres; + double m_clustering_color_thres; double m_cluster_contour_distance_weight; std::vector m_cluster_contour_color_weights; std::vector m_contour_detection_color_weights; double m_cluster_contour_size_weight; - std::map contour_matching_color_range_map; + std::map> contour_matching_color_range_map; public: BBoxProjectPCloud(); diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py index f3851409..52aecc55 100644 --- a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -16,6 +16,9 @@ def generate_launch_description(): matching_weights = os.path.join( bringup_prefix, "config", "perception", "filtering_weights.yaml" ) + contour_matching_color_ranges = os.path.join( + bringup_prefix, "config", "perception", "contour_matching_color_ranges.yaml" + ) bbox_project_pcloud_node = launch_ros.actions.Node( package="all_seaing_perception", executable="bbox_project_pcloud", @@ -33,7 +36,8 @@ def generate_launch_description(): {"obstacle_seg_thresh": 10.0}, {"obstacle_drop_thresh": 1.0}, {"polygon_area_thresh": 100000.0}, - {"matching_weights_file": matching_weights} + {"matching_weights_file": matching_weights}, + {"contour_matching_color_ranges_file": contour_matching_color_ranges} ] ) return LaunchDescription([ diff --git a/all_seaing_perception/launch/yolov8_detection.launch.py b/all_seaing_perception/launch/yolov8_detection.launch.py deleted file mode 100644 index f371b883..00000000 --- a/all_seaing_perception/launch/yolov8_detection.launch.py +++ /dev/null @@ -1,52 +0,0 @@ -from ament_index_python.packages import get_package_share_directory -from launch import LaunchDescription -from launch.actions import IncludeLaunchDescription -from launch.launch_description_sources import PythonLaunchDescriptionSource - -""" -Reference: https://github.com/mgonzs13/yolov8_ros - -Instructions: -$ cd ~/arcturus/dev_ws/src/ -$ git clone https://github.com/mgonzs13/yolov8_ros.git -$ pip install 
-r yolov8_ros/requirements.txt -$ cd ~/arcturus/dev_ws/ -$ rosdep install --from-paths src --ignore-src -r -y -$ colcon build --symlink-install -$ source ~/arcturus/dev_ws/install/setup.bash -$ source ~/arcturus/vrx_ws/install/setup.bash - -follow those instructions before running script - -Parameters to Yolov8 Launch - model: YOLOv8 model (default: yolov8m.pt) - tracker: tracker file (default: bytetrack.yaml) - device: GPU/CUDA (default: cuda:0) - enable: wether to start YOLOv8 enabled (default: True) - threshold: detection threshold (default: 0.5) - input_image_topic: camera topic of RGB images (default: /camera/rgb/image_raw) - image_reliability: reliability for the image topic: 0=system default, 1=Reliable, 2=Best Effort (default: 2) - -Ensure that the model parameter has the correct path to a yolov8 model -and that input_image_topic subscribes to the correct topic (such as zed) -""" - - -def generate_launch_description(): - - yolov8_prefix = get_package_share_directory("yolov8_bringup") - model_share_path = get_package_share_directory("all_seaing_perception") - - return LaunchDescription( - [ - IncludeLaunchDescription( - PythonLaunchDescriptionSource( - [yolov8_prefix, "/launch/yolov8.launch.py"] - ), - launch_arguments={ - "model": model_share_path + "/models/yolov8_roboboat_model.pt", - "input_image_topic": "/webcam_image", - }.items(), - ) - ] - ) diff --git a/all_seaing_perception/launch/yolov8_detection_sim.launch.py b/all_seaing_perception/launch/yolov8_detection_sim.launch.py deleted file mode 100644 index b72a76b7..00000000 --- a/all_seaing_perception/launch/yolov8_detection_sim.launch.py +++ /dev/null @@ -1,52 +0,0 @@ -from ament_index_python.packages import get_package_share_directory -from launch import LaunchDescription -from launch.actions import IncludeLaunchDescription -from launch.launch_description_sources import PythonLaunchDescriptionSource - -""" -Reference: https://github.com/mgonzs13/yolov8_ros - -Instructions: -$ cd ~/arcturus/dev_ws/src/ -$ git clone https://github.com/mgonzs13/yolov8_ros.git -$ pip install -r yolov8_ros/requirements.txt -$ cd ~/arcturus/dev_ws/ -$ rosdep install --from-paths src --ignore-src -r -y -$ colcon build --symlink-install -$ source ~/arcturus/dev_ws/install/setup.bash -$ source ~/arcturus/vrx_ws/install/setup.bash - -follow those instructions before running script - -Parameters to Yolov8 Launch - model: YOLOv8 model (default: yolov8m.pt) - tracker: tracker file (default: bytetrack.yaml) - device: GPU/CUDA (default: cuda:0) - enable: wether to start YOLOv8 enabled (default: True) - threshold: detection threshold (default: 0.5) - input_image_topic: camera topic of RGB images (default: /camera/rgb/image_raw) - image_reliability: reliability for the image topic: 0=system default, 1=Reliable, 2=Best Effort (default: 2) - -Ensure that the model parameter has the correct path to a yolov8 model -and that input_image_topic subscribes to the correct topic (such as zed) -""" - - -def generate_launch_description(): - - yolov8_prefix = get_package_share_directory("yolov8_bringup") - model_share_path = get_package_share_directory("all_seaing_perception") - - return LaunchDescription( - [ - IncludeLaunchDescription( - PythonLaunchDescriptionSource( - [yolov8_prefix, "/launch/yolov8.launch.py"] - ), - launch_arguments={ - "model": model_share_path + "/models/yolov8_roboboat_model.pt", - "input_image_topic": "/wamv/sensors/cameras/front_left_camera_sensor/image_raw", - }.items(), - ) - ] - ) From 4c24a27138999dd2b2f4b09bbd3142f4d9537540 Mon Sep 17 
00:00:00 2001 From: Panagiotis Liampas Date: Mon, 2 Dec 2024 02:29:50 -0500 Subject: [PATCH 16/35] debugging, finding clusters & contours seems to work now, now need to test & troubleshoot/tune the matching --- .../contour_matching_color_ranges.yaml | 8 +- all_seaing_bringup/launch/sim.launch.py | 12 ++- .../bbox_project_pcloud.cpp | 82 ++++++++++++++++--- .../color_segmentation.py | 6 +- .../launch/bbox_project_pcloud_test.launch.py | 2 +- 5 files changed, 89 insertions(+), 21 deletions(-) diff --git a/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml b/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml index ce67fb52..f78a8fa5 100644 --- a/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml +++ b/all_seaing_bringup/config/perception/contour_matching_color_ranges.yaml @@ -1,6 +1,6 @@ -orange: [10, 31, 125, 255, 60, 255] -red: [0, 10, 175, 255, 70, 255] -red2: [170, 179, 175, 255, 70, 255] -green: [59, 82, 142, 255, 30, 255] +orange: [10, 30, 40, 255, 40, 255] +red: [0, 15, 40, 255, 40, 255] +red2: [150, 179, 40, 255, 40, 255] +green: [35, 80, 40, 255, 40, 255] black: [0, 0, 0, 0, 0, 50] white: [0, 0, 0, 10, 200, 255] \ No newline at end of file diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 948902cf..37c03850 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -26,7 +26,7 @@ def generate_launch_description(): bringup_prefix, "config", "perception", "color_ranges.yaml" ) matching_weights = os.path.join( - bringup_prefix, "config", "perception", "filtering_weights.yaml" + bringup_prefix, "config", "perception", "matching_weights.yaml" ) contour_matching_color_ranges = os.path.join( bringup_prefix, "config", "perception", "contour_matching_color_ranges.yaml" @@ -249,6 +249,15 @@ def generate_launch_description(): }.items(), ) + yolov8_node = launch_ros.actions.Node( + package="all_seaing_perception", + executable="yolov8_node.py", + output="screen", + remappings=[ + ("image_raw", "/zed/zed_node/rgb/image_rect_color"), + ] + ) + return LaunchDescription( [ launch_rviz_launch_arg, @@ -258,6 +267,7 @@ def generate_launch_description(): obstacle_bbox_overlay_node, obstacle_bbox_visualizer_node, color_segmentation_node, + # yolov8_node, point_cloud_filter_node, obstacle_detector_node, # bbox_project_pcloud_node, diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 0c4ec7f1..8d40a63e 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -30,12 +30,14 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ this->declare_parameter("color_ranges_file", ""); this->declare_parameter("color_label_mappings_file", ""); + color_ranges_file = this->get_parameter("color_ranges_file").as_string(); color_label_mappings_file = this->get_parameter("color_label_mappings_file").as_string(); // for cluster-contour matching & selection this->declare_parameter("matching_weights_file", ""); - matching_weights_file = this->get_parameter("matching_weights_file").as_string(); this->declare_parameter("contour_matching_color_ranges_file", ""); + + matching_weights_file = this->get_parameter("matching_weights_file").as_string(); contour_matching_color_ranges_file = this->get_parameter("contour_matching_color_ranges_file").as_string(); // 
Subscriptions @@ -59,11 +61,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_refined_object_contour_viz_pub = this->create_publisher("refined_object_contours_viz", 5); // get color label mappings from yaml + RCLCPP_DEBUG(this->get_logger(), "READING COLOR LABEL MAPPINGS"); std::ifstream label_yaml(color_label_mappings_file); if (label_yaml.is_open()) { label_config_yaml = YAML::Load(label_yaml); for (YAML::const_iterator it = label_config_yaml.begin(); it != label_config_yaml.end(); ++it) { label_color_map[it->second.as()] = it->first.as(); + RCLCPP_DEBUG(this->get_logger(), "%d -> %s", it->second.as(), it->first.as().c_str()); } } else { @@ -72,11 +76,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ label_yaml.close(); // get color ranges from yaml + RCLCPP_DEBUG(this->get_logger(), "READING COLOR RANGES"); std::ifstream ranges_yaml(color_ranges_file); if (ranges_yaml.is_open()) { ranges_config_yaml = YAML::Load(ranges_yaml); for (YAML::const_iterator it = ranges_config_yaml.begin(); it != ranges_config_yaml.end(); ++it) { color_range_map[it->first.as()] = it->second.as>(); + RCLCPP_DEBUG(this->get_logger(), "%s -> [%d, %d, %d, %d, %d, %d]", it->first.as().c_str(), it->second.as>()[0], it->second.as>()[1], it->second.as>()[2], it->second.as>()[3], it->second.as>()[4], it->second.as>()[5]); } } else { @@ -85,6 +91,7 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ ranges_yaml.close(); // get cluster-contour matching & selection parameters from yaml + RCLCPP_DEBUG(this->get_logger(), "READING MATCHING PARAMETERS"); std::ifstream matching_yaml(matching_weights_file); if (matching_yaml.is_open()) { matching_weights_config_yaml = YAML::Load(matching_yaml); @@ -96,6 +103,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ m_cluster_contour_color_weights = matching_weights_config_yaml["cluster_contour_color_weights"].as>(); m_contour_detection_color_weights = matching_weights_config_yaml["contour_detection_color_weights"].as>(); m_cluster_contour_size_weight = matching_weights_config_yaml["cluster_contour_size_weight"].as(); + + RCLCPP_DEBUG(this->get_logger(), "clustering_distance_weight: %lf", m_clustering_distance_weight); + RCLCPP_DEBUG(this->get_logger(), "clustering_color_weights: [%lf, %lf, %lf]", m_clustering_color_weights[0], m_clustering_color_weights[1], m_clustering_color_weights[2]); + RCLCPP_DEBUG(this->get_logger(), "clustering_color_thres: %lf", m_clustering_color_thres); + RCLCPP_DEBUG(this->get_logger(), "cluster_contour_distance_weight: %lf", m_cluster_contour_distance_weight); + RCLCPP_DEBUG(this->get_logger(), "cluster_contour_color_weights: [%lf, %lf, %lf]", m_cluster_contour_color_weights[0], m_cluster_contour_color_weights[1], m_cluster_contour_color_weights[2]); + RCLCPP_DEBUG(this->get_logger(), "contour_detection_color_weights: [%lf, %lf, %lf]", m_contour_detection_color_weights[0], m_contour_detection_color_weights[1], m_contour_detection_color_weights[2]); } else { RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", matching_weights_file.c_str()); @@ -103,11 +117,13 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ matching_yaml.close(); // get contour matching color ranges from yaml + RCLCPP_DEBUG(this->get_logger(), "READING CONTOUR MATCHING COLOR RANGES"); std::ifstream contour_ranges_yaml(contour_matching_color_ranges_file); if (contour_ranges_yaml.is_open()) { contour_matching_ranges_config_yaml = 
YAML::Load(contour_ranges_yaml); for (YAML::const_iterator it = contour_matching_ranges_config_yaml.begin(); it != contour_matching_ranges_config_yaml.end(); ++it) { contour_matching_color_range_map[it->first.as()] = it->second.as>(); + RCLCPP_DEBUG(this->get_logger(), "%s -> [%d, %d, %d, %d, %d, %d]", it->first.as().c_str(), it->second.as>()[0], it->second.as>()[1], it->second.as>()[2], it->second.as>()[3], it->second.as>()[4], it->second.as>()[5]); } } else { @@ -117,7 +133,7 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ } void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { - RCLCPP_INFO(this->get_logger(), "GOT CAMERA INFO"); + RCLCPP_DEBUG(this->get_logger(), "GOT CAMERA INFO"); m_cam_model.fromCameraInfo(info_msg); } @@ -125,10 +141,14 @@ bool hsv_diff_condition(std::vector weights, double thres, const pcl::Po return weights[0]*(p1.h-p2.h)*(p1.h-p2.h)+weights[1]*(p1.s-p2.s)*(p1.s-p2.s)+weights[2]*(p1.v-p2.v)*(p1.v-p2.v) < thres*thres; } -double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color){ +double line_range_penalty(int a, int b, int x){ // Average of squared distances from each point in the range + return (a<=x && x<=b)?0:(pow(b-x,3)-pow(x-a,3))/(b-a); +} + +double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color){ double h_min=color_range[0], h_max=color_range[1], s_min=color_range[2], s_max=color_range[3], v_min=color_range[4], v_max=color_range[5]; - return weights[0]*(pow(h_max-pt_color[0],3)-pow(h_min-pt_color[0],3))/(h_max-h_min) + weights[1]*(pow(s_max-pt_color[1],3)-pow(s_min-pt_color[1],3))/(s_max-s_min) + weights[2]*(pow(v_max-pt_color[2],3)-pow(v_min-pt_color[2],3))/(v_max-v_min); + return weights[0]*line_range_penalty(h_min, h_max, pt_color[0]) + weights[1]*line_range_penalty(s_min, s_max, pt_color[1]) + weights[2]*line_range_penalty(v_min, v_max, pt_color[2]); } void BBoxProjectPCloud::bb_pcl_project( @@ -146,8 +166,8 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::PointCloud::Ptr in_cloud_tf_ptr(new pcl::PointCloud); pcl::fromROSMsg(in_cloud_tf, *in_cloud_tf_ptr); - RCLCPP_INFO(this->get_logger(), "%d POINTS, %d OBJECTS", in_cloud_tf_ptr->points.size(), in_bbox_msg->boxes.size()); - RCLCPP_INFO(this->get_logger(), "FRAME_ID BEFORE SELECTION, AFTER TRANSFORM TO CAMERA FRAME: %s", in_cloud_tf_ptr->header.frame_id); + RCLCPP_DEBUG(this->get_logger(), "%d POINTS, %d OBJECTS", in_cloud_tf_ptr->points.size(), in_bbox_msg->boxes.size()); + RCLCPP_DEBUG(this->get_logger(), "FRAME_ID BEFORE SELECTION, AFTER TRANSFORM TO CAMERA FRAME: %s", in_cloud_tf_ptr->header.frame_id); auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); std::vector> obj_cloud_vec; @@ -170,6 +190,9 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_ERROR(this->get_logger(), "cv_bridge exception: %s", e.what()); return; } + //show received image + cv::imshow("Received Image", cv_ptr->image); + cv::waitKey(); cv::Mat cv_hsv; cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); std::vector::Ptr>> bbox_pcloud_objects; @@ -191,7 +214,7 @@ void BBoxProjectPCloud::bb_pcl_project( if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ cv::Vec3b hsv = cv_hsv.at(xy_rect); // cv::Vec3b bgr = cv_ptr->image.at(xy_rect); - obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, hsv[0], hsv[1], hsv[2])); + 
obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, 2*hsv[0], ((float)hsv[1])/255.0, ((float)hsv[2])/255.0)); // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, bgr[2], bgr[1], bgr[0])); RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); @@ -203,7 +226,7 @@ void BBoxProjectPCloud::bb_pcl_project( obj_cloud_vec.push_back(*obj_cloud_ptr); bbox_pcloud_objects.push_back(std::make_pair(bbox, obj_cloud_ptr)); obj++; - RCLCPP_INFO(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); + RCLCPP_DEBUG(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); max_len = std::max(max_len, (int)obj_cloud_ptr->size()); } RCLCPP_INFO(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); @@ -214,9 +237,9 @@ void BBoxProjectPCloud::bb_pcl_project( //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); RCLCPP_INFO(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); - // RCLCPP_INFO(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); + // RCLCPP_DEBUG(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ - //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS + //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS (ALTHOUGH THAT'S ONLY FOR RVIZ SO WE DON'T CARE) //(I.E. 
HEIGHT & WIDTH ARE CORRECT, OTHERWISE SWAP THEIR ORDER BOTH IN THE RESIZE() AND THE AT() METHODS) for(int i = 0; i, cv::Mat>> refined_cloud_contour_vec; int max_refined_len = 0; + RCLCPP_INFO(this->get_logger(), "# OF FOUND BBOXES: %d", bbox_pcloud_objects.size()); for(auto bbox_pcloud_pair : bbox_pcloud_objects){ all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; @@ -275,26 +299,56 @@ void BBoxProjectPCloud::bb_pcl_project( std::function cond_func = std::bind(&hsv_diff_condition, m_clustering_color_weights, m_clustering_color_thres, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3); cec.setConditionFunction(cond_func); cec.segment(clusters_indices); + RCLCPP_INFO(this->get_logger(), "# OF EXTRACTED CLUSTERS: %d", clusters_indices.size()); + int clust_id = 0; + for(auto ind_set : clusters_indices){ + RCLCPP_INFO(this->get_logger(), "SIZE OF CLUSTER %d: %d", clust_id, ind_set.indices.size()); + clust_id++; + } // color segmentation using the color label - cv::Mat hsv_img(cv_hsv, cv::Range(bbox.min_x, bbox.max_x), cv::Range(bbox.min_y, bbox.max_y)); + RCLCPP_INFO(this->get_logger(), "BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", bbox.min_x, bbox.min_y, bbox.max_x+1, bbox.max_y+1, cv_hsv.cols, cv_hsv.rows); + if(bbox.min_x > bbox.max_x || bbox.min_y > bbox.max_y) continue; + //make sure bbox coords are inside image + int adj_min_x = std::max((int)bbox.min_x,0); + int adj_max_x = std::min((int)bbox.max_x+1, cv_hsv.cols); + int adj_min_y = std::max((int)bbox.min_y,0); + int adj_max_y = std::min((int)bbox.max_y+1, cv_hsv.rows); + RCLCPP_INFO(this->get_logger(), "ADJUSTED BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", adj_min_x, adj_min_y, adj_max_x, adj_max_y, cv_hsv.cols, cv_hsv.rows); + cv::Mat hsv_img(cv_hsv, cv::Range(adj_min_y, adj_max_y), cv::Range(adj_min_x, adj_max_x)); + //show received bbox + cv::Mat bgr_bbox; + cv::cvtColor(hsv_img, bgr_bbox, cv::COLOR_HSV2BGR); + cv::imshow("BBox", bgr_bbox); + cv::waitKey(); cv::Size img_sz; cv::Point bbox_offset; hsv_img.locateROI(img_sz, bbox_offset); - std::vector lims = label_color_map[bbox.label]=="red"?color_range_map["red2"]:color_range_map[label_color_map[bbox.label]]; + RCLCPP_INFO(this->get_logger(), "IMAGE SIZE: (%d, %d)", img_sz.height, img_sz.width); + RCLCPP_INFO(this->get_logger(), "BBOX OFFSET: (%d, %d)", bbox_offset.x, bbox_offset.y); + std::vector lims = label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]]; int h_min=lims[0], h_max=lims[1], s_min=lims[2], s_max=lims[3], v_min=lims[4], v_max=lims[5]; + RCLCPP_INFO(this->get_logger(), "COLOR: %s, & SEGMENTATION COLOR LIMITS-> H: %d - %d, S: %d - %d, V: %d - %d", label_color_map[bbox.label].c_str(), h_min, h_max, s_min, s_max, v_min, v_max); + cv::Scalar bbox_mean_hsv = cv::mean(hsv_img); + RCLCPP_INFO(this->get_logger(), "MEAN BBOX IMAGE COLOR: (%lf, %lf, %lf)", bbox_mean_hsv[0], bbox_mean_hsv[1], bbox_mean_hsv[2]); cv::Mat mask; cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); + //show mask + cv::imshow("Mask", mask); + cv::waitKey(); cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5,5))); cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7,7))); std::vector> contours; cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); + RCLCPP_INFO(this->get_logger(), "# OF 
FOUND CONTOURS: %d", contours.size()); // Convert the contour points to fit the original image (using image_sz and bbox_offset -> just bbox_offset+pt_coords) to be able to use it with the LiDAR (projected) points for(int ctr = 0; ctr < contours.size(); ctr++){ + RCLCPP_INFO(this->get_logger(), "SIZE OF CONTOUR %d: %d", ctr, contours[ctr].size()); for(int pt=0; pt < contours[ctr].size(); pt++){ contours[ctr][pt]+=bbox_offset; } } + if(contours.empty() || clusters_indices.empty()) continue; // Convert both the pcloud and the contours to a vector of pair and compute sum and sum of squares (can be used to compute all the needed metrics) std::vector>> contours_pts; std::vector, std::pair>> contours_qts; @@ -303,6 +357,10 @@ void BBoxProjectPCloud::bb_pcl_project( std::pair, std::pair> contour_qts; for(cv::Point image_pt : contour){ cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); + //transform to PCL HSV + image_pt_hsv[0]*=2; + image_pt_hsv[1]/=255.0; + image_pt_hsv[2]/=255.0; cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); contour_pts.push_back(std::make_pair(image_pt_xy, image_pt_hsv)); //store sums diff --git a/all_seaing_perception/all_seaing_perception/color_segmentation.py b/all_seaing_perception/all_seaing_perception/color_segmentation.py index c0d573ef..374e924b 100755 --- a/all_seaing_perception/all_seaing_perception/color_segmentation.py +++ b/all_seaing_perception/all_seaing_perception/color_segmentation.py @@ -46,10 +46,10 @@ def img_callback(self, img): bboxes.header = img.header try: - img = self.bridge.imgmsg_to_cv2(img, "rgb8") + img = self.bridge.imgmsg_to_cv2(img, "bgr8") except cv_bridge.CvBridgeError as e: self.get_logger().info(str(e)) - hsv_img = cv2.cvtColor(img, cv2.COLOR_RGB2HSV) + hsv_img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) colors = self.colors label_dict = self.label_dict @@ -104,7 +104,7 @@ def img_callback(self, img): 4, ) - self.img_pub.publish(self.bridge.cv2_to_imgmsg(img, "rgb8")) + self.img_pub.publish(self.bridge.cv2_to_imgmsg(img, "bgr8")) self.bbox_pub.publish(bboxes) diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py index 52aecc55..18362a92 100644 --- a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -14,7 +14,7 @@ def generate_launch_description(): bringup_prefix, "config", "perception", "color_ranges.yaml" ) matching_weights = os.path.join( - bringup_prefix, "config", "perception", "filtering_weights.yaml" + bringup_prefix, "config", "perception", "matching_weights.yaml" ) contour_matching_color_ranges = os.path.join( bringup_prefix, "config", "perception", "contour_matching_color_ranges.yaml" From 627968f43ff70260cfde3be26cd515e4a10701b1 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Wed, 4 Dec 2024 04:59:37 -0500 Subject: [PATCH 17/35] Added visualizations, seems to mostly work, crashes at some points --- all_seaing_perception/CMakeLists.txt | 4 +- .../bbox_project_pcloud.cpp | 128 +++++++++++++----- .../bbox_project_pcloud.hpp | 12 +- 3 files changed, 102 insertions(+), 42 deletions(-) diff --git a/all_seaing_perception/CMakeLists.txt b/all_seaing_perception/CMakeLists.txt index d4fb5075..6fe3427c 100644 --- a/all_seaing_perception/CMakeLists.txt +++ b/all_seaing_perception/CMakeLists.txt @@ -10,6 +10,8 @@ ament_auto_find_build_dependencies() find_package(yaml-cpp REQUIRED) +find_package(PCL 1.12 REQUIRED COMPONENTS common visualization io) + 
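
The int_to_bgr helper added to bbox_project_pcloud.cpp below gives each cluster or contour index a distinct display color by spreading hues over OpenCV's 0-179 hue wheel and converting a 1x1 HSV pixel to BGR. A minimal standalone sketch of the same idea (assumes OpenCV is available; the index_to_bgr name and the cluster count are illustrative, not part of the patch):

    #include <cstdio>
    #include <opencv2/imgproc.hpp>

    // Map index i of k to a saturated, bright BGR color by sweeping hue.
    static cv::Vec3b index_to_bgr(int i, int k) {
        cv::Mat hsv(1, 1, CV_8UC3, cv::Scalar(180.0 * i / k, 255, 255));  // OpenCV hue is 0..179
        cv::Mat bgr;
        cv::cvtColor(hsv, bgr, cv::COLOR_HSV2BGR);
        return bgr.at<cv::Vec3b>(0, 0);
    }

    int main() {
        const int k = 5;  // e.g. five clusters to visualize
        for (int i = 0; i < k; ++i) {
            cv::Vec3b c = index_to_bgr(i, k);
            std::printf("cluster %d -> BGR(%d, %d, %d)\n", i, c[0], c[1], c[2]);
        }
        return 0;
    }
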
ament_auto_add_executable( obstacle_detector ${PROJECT_NAME}/obstacle_detector.cpp @@ -37,7 +39,7 @@ ament_auto_add_executable( ) target_link_libraries(obstacle_bbox_visualizer yaml-cpp) -target_link_libraries(bbox_project_pcloud yaml-cpp) +target_link_libraries(bbox_project_pcloud yaml-cpp ${PCL_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_VISUALIZATION_LIBRARIES}) install(TARGETS obstacle_detector diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 8d40a63e..b8315961 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -137,6 +137,7 @@ void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_m m_cam_model.fromCameraInfo(info_msg); } +// The HSV-similarity condition for Conditional Euclidean Clustering bool hsv_diff_condition(std::vector weights, double thres, const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist){ return weights[0]*(p1.h-p2.h)*(p1.h-p2.h)+weights[1]*(p1.s-p2.s)*(p1.s-p2.s)+weights[2]*(p1.v-p2.v)*(p1.v-p2.v) < thres*thres; } @@ -146,11 +147,19 @@ double line_range_penalty(int a, int b, int x){ return (a<=x && x<=b)?0:(pow(b-x,3)-pow(x-a,3))/(b-a); } +// The penalty function that compares an HSV color value to an HSV color range double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color){ double h_min=color_range[0], h_max=color_range[1], s_min=color_range[2], s_max=color_range[3], v_min=color_range[4], v_max=color_range[5]; return weights[0]*line_range_penalty(h_min, h_max, pt_color[0]) + weights[1]*line_range_penalty(s_min, s_max, pt_color[1]) + weights[2]*line_range_penalty(v_min, v_max, pt_color[2]); } +cv::Vec3b int_to_bgr(double i, double k){ + cv::Mat hsvimg(1,1,CV_8UC3,cv::Vec3b(i/k*180,255,255)); + cv::Mat bgrimg; + cv::cvtColor(hsvimg, bgrimg, cv::COLOR_HSV2BGR); + return bgrimg.at(0,0); +} + void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, @@ -191,8 +200,8 @@ void BBoxProjectPCloud::bb_pcl_project( return; } //show received image - cv::imshow("Received Image", cv_ptr->image); - cv::waitKey(); + // cv::imshow("Received Image", cv_ptr->image); + // cv::waitKey(); cv::Mat cv_hsv; cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); std::vector::Ptr>> bbox_pcloud_objects; @@ -271,6 +280,30 @@ void BBoxProjectPCloud::bb_pcl_project( // pcloud_ptr_hsv->push_back(pt_hsv); // } + //image & bbox relation & adjustment (make sure in-bounds) + RCLCPP_INFO(this->get_logger(), "BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", bbox.min_x, bbox.min_y, bbox.max_x+1, bbox.max_y+1, cv_hsv.cols, cv_hsv.rows); + if(bbox.min_x > bbox.max_x || bbox.min_y > bbox.max_y) continue; + //make sure bbox coords are inside image + int adj_min_x = std::max((int)bbox.min_x,0); + int adj_max_x = std::min((int)bbox.max_x+1, cv_hsv.cols); + int adj_min_y = std::max((int)bbox.min_y,0); + int adj_max_y = std::min((int)bbox.max_y+1, cv_hsv.rows); + RCLCPP_INFO(this->get_logger(), "ADJUSTED BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", adj_min_x, adj_min_y, adj_max_x, adj_max_y, cv_hsv.cols, cv_hsv.rows); + cv::Mat cv_hsv_copy = cv_hsv.clone(); + cv::Mat hsv_img(cv_hsv, cv::Range(adj_min_y, adj_max_y), cv::Range(adj_min_x, adj_max_x)); + cv::Size img_sz; + cv::Point bbox_offset; 
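
hsv_img above is constructed as a cv::Mat ROI view of the full HSV image, and locateROI on the next line recovers the parent size and the top-left offset that later gets added back to contour points. A self-contained sketch of that mechanism (assumes OpenCV; the image size and ROI bounds are made up, not taken from the patch):

    #include <cstdio>
    #include <opencv2/core.hpp>

    int main() {
        // The parent stands in for cv_hsv; the ROI stands in for the clamped bbox crop.
        cv::Mat parent = cv::Mat::zeros(480, 640, CV_8UC3);
        cv::Mat roi(parent, cv::Range(100, 200), cv::Range(50, 150));  // rows, then cols

        // The ROI is a view: writing through it modifies the parent's pixels.
        roi.setTo(cv::Scalar(10, 20, 30));

        // locateROI recovers the parent size and the ROI's top-left corner,
        // i.e. the bbox_offset that the patch adds back to contour points.
        cv::Size whole;
        cv::Point offset;
        roi.locateROI(whole, offset);
        std::printf("parent %dx%d, roi %dx%d, offset (%d, %d)\n",
                    whole.width, whole.height, roi.cols, roi.rows, offset.x, offset.y);

        // A point found inside the ROI maps to parent coordinates as pt + offset.
        cv::Point pt_in_roi(5, 7);
        cv::Point pt_in_parent = pt_in_roi + offset;
        std::printf("(%d, %d) in ROI -> (%d, %d) in parent\n",
                    pt_in_roi.x, pt_in_roi.y, pt_in_parent.x, pt_in_parent.y);
        return 0;
    }
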
+ hsv_img.locateROI(img_sz, bbox_offset); + RCLCPP_INFO(this->get_logger(), "IMAGE SIZE: (%d, %d)", img_sz.height, img_sz.width); + RCLCPP_INFO(this->get_logger(), "BBOX OFFSET: (%d, %d)", bbox_offset.x, bbox_offset.y); + + //to display points inside contours, and the clusters projected (debugging) + cv::Mat cv_img_copy = cv_ptr->image.clone(); + cv::Mat mat_clusters(cv_img_copy, cv::Range(adj_min_y, adj_max_y), cv::Range(adj_min_x, adj_max_x)); + cv::Mat mat_contours = mat_clusters.clone(); + cv::Mat mat_opt_cluster = mat_clusters.clone(); + cv::Mat mat_opt_contour = mat_contours.clone(); + //extract clusters pcl::search::KdTree::Ptr tree(new pcl::search::KdTree()); // pcl::search::KdTree::Ptr tree(new pcl::search::KdTree); @@ -303,29 +336,19 @@ void BBoxProjectPCloud::bb_pcl_project( int clust_id = 0; for(auto ind_set : clusters_indices){ RCLCPP_INFO(this->get_logger(), "SIZE OF CLUSTER %d: %d", clust_id, ind_set.indices.size()); + for(pcl::index_t ind : ind_set.indices){ + cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); + mat_clusters.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(clust_id, clusters_indices.size()); + } clust_id++; } + //show received bbox + // cv::Mat bgr_bbox; + // cv::cvtColor(hsv_img, bgr_bbox, cv::COLOR_HSV2BGR); + // cv::imshow("BBox", bgr_bbox); + // cv::waitKey(); // color segmentation using the color label - RCLCPP_INFO(this->get_logger(), "BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", bbox.min_x, bbox.min_y, bbox.max_x+1, bbox.max_y+1, cv_hsv.cols, cv_hsv.rows); - if(bbox.min_x > bbox.max_x || bbox.min_y > bbox.max_y) continue; - //make sure bbox coords are inside image - int adj_min_x = std::max((int)bbox.min_x,0); - int adj_max_x = std::min((int)bbox.max_x+1, cv_hsv.cols); - int adj_min_y = std::max((int)bbox.min_y,0); - int adj_max_y = std::min((int)bbox.max_y+1, cv_hsv.rows); - RCLCPP_INFO(this->get_logger(), "ADJUSTED BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", adj_min_x, adj_min_y, adj_max_x, adj_max_y, cv_hsv.cols, cv_hsv.rows); - cv::Mat hsv_img(cv_hsv, cv::Range(adj_min_y, adj_max_y), cv::Range(adj_min_x, adj_max_x)); - //show received bbox - cv::Mat bgr_bbox; - cv::cvtColor(hsv_img, bgr_bbox, cv::COLOR_HSV2BGR); - cv::imshow("BBox", bgr_bbox); - cv::waitKey(); - cv::Size img_sz; - cv::Point bbox_offset; - hsv_img.locateROI(img_sz, bbox_offset); - RCLCPP_INFO(this->get_logger(), "IMAGE SIZE: (%d, %d)", img_sz.height, img_sz.width); - RCLCPP_INFO(this->get_logger(), "BBOX OFFSET: (%d, %d)", bbox_offset.x, bbox_offset.y); std::vector lims = label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]]; int h_min=lims[0], h_max=lims[1], s_min=lims[2], s_max=lims[3], v_min=lims[4], v_max=lims[5]; RCLCPP_INFO(this->get_logger(), "COLOR: %s, & SEGMENTATION COLOR LIMITS-> H: %d - %d, S: %d - %d, V: %d - %d", label_color_map[bbox.label].c_str(), h_min, h_max, s_min, s_max, v_min, v_max); @@ -334,28 +357,45 @@ void BBoxProjectPCloud::bb_pcl_project( cv::Mat mask; cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); //show mask - cv::imshow("Mask", mask); - cv::waitKey(); + // cv::imshow("Mask", mask); + // cv::waitKey(); cv::erode(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5,5))); cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7,7))); std::vector> contours; 
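
Immediately after this, the patch collects the pixels inside each detected contour by scanning the contour's bounding rectangle and keeping points where pointPolygonTest is non-negative, so matching uses filled regions rather than one-pixel boundaries. A self-contained sketch of that step on a synthetic mask (assumes OpenCV; not the patch code, mask contents are made up):

    #include <cstdio>
    #include <vector>
    #include <opencv2/imgproc.hpp>

    int main() {
        // Binary mask standing in for the inRange + erode/dilate output.
        cv::Mat mask = cv::Mat::zeros(60, 80, CV_8UC1);
        cv::rectangle(mask, cv::Rect(20, 15, 25, 20), cv::Scalar(255), cv::FILLED);

        std::vector<std::vector<cv::Point>> contours;
        cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE);

        // For each contour, scan only its bounding rectangle and keep the pixels
        // that pointPolygonTest reports as inside or on the boundary (>= 0).
        for (size_t c = 0; c < contours.size(); ++c) {
            std::vector<cv::Point> interior;
            cv::Rect box = cv::boundingRect(contours[c]);
            for (int y = box.y; y < box.y + box.height; ++y)
                for (int x = box.x; x < box.x + box.width; ++x)
                    if (cv::pointPolygonTest(contours[c], cv::Point2f((float)x, (float)y), false) >= 0)
                        interior.push_back(cv::Point(x, y));
            std::printf("contour %zu: %zu boundary points, %zu interior points\n",
                        c, contours[c].size(), interior.size());
        }
        return 0;
    }
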
cv::findContours(mask, contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE); - RCLCPP_INFO(this->get_logger(), "# OF FOUND CONTOURS: %d", contours.size()); + std::vector> in_contours; + //Get image points inside contour, put them into a vector, will then process those (not the contours themselves, they are just a boundary) + for(std::vector contour : contours){ + //TAKE THE BOUNDING BOX (BOUNDINGRECT), TAKE ALL THE PIXELS IN IT (NOT NOT CHECK EVERY PIXEL IN THE IMAGE) AND CHECK IF IT'S INSIDE THE CONTOUR USING POINTPOLYGONTEST + cv::Rect cont_bbox = cv::boundingRect(contour); + std::vector in_contour; + for(int x = cont_bbox.x; x < cont_bbox.x + cont_bbox.width; x++){ + for(int y = cont_bbox.y; y < cont_bbox.y + cont_bbox.height; y++){ + cv::Point pt = cv::Point(x,y); + if(cv::pointPolygonTest(contour, pt, false)>=0){ + in_contour.push_back(pt); + } + } + } + in_contours.push_back(in_contour); + } + RCLCPP_INFO(this->get_logger(), "# OF FOUND CONTOURS: %d", in_contours.size()); // Convert the contour points to fit the original image (using image_sz and bbox_offset -> just bbox_offset+pt_coords) to be able to use it with the LiDAR (projected) points - for(int ctr = 0; ctr < contours.size(); ctr++){ - RCLCPP_INFO(this->get_logger(), "SIZE OF CONTOUR %d: %d", ctr, contours[ctr].size()); - for(int pt=0; pt < contours[ctr].size(); pt++){ - contours[ctr][pt]+=bbox_offset; + for(int ctr = 0; ctr < in_contours.size(); ctr++){ + RCLCPP_INFO(this->get_logger(), "SIZE OF CONTOUR %d: %d", ctr, in_contours[ctr].size()); + for(int pt=0; pt < in_contours[ctr].size(); pt++){ + in_contours[ctr][pt]+=bbox_offset; + mat_contours.at(in_contours[ctr][pt]-bbox_offset) = int_to_bgr(ctr, in_contours.size()); } } - if(contours.empty() || clusters_indices.empty()) continue; + if(in_contours.empty() || clusters_indices.empty()) continue; // Convert both the pcloud and the contours to a vector of pair and compute sum and sum of squares (can be used to compute all the needed metrics) std::vector>> contours_pts; std::vector, std::pair>> contours_qts; - for (std::vector contour : contours){// point.x, point.y the coords + for (std::vector in_contour : in_contours){// point.x, point.y the coords std::vector> contour_pts; std::pair, std::pair> contour_qts; - for(cv::Point image_pt : contour){ + for(cv::Point image_pt : in_contour){ cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); //transform to PCL HSV image_pt_hsv[0]*=2; @@ -451,13 +491,16 @@ void BBoxProjectPCloud::bb_pcl_project( refined_pcl_contours.label = bbox.label; pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud); refined_cloud_ptr->header = pcloud_ptr->header; - cv::Mat refined_obj_contour_mat = cv::Mat::zeros(img_sz, CV_8UC1); - for (cv::Point image_pt : contours[opt_contour_id]){ - refined_obj_contour_mat.at(image_pt) = 1; + cv::Mat refined_obj_contour_mat = cv::Mat::zeros(img_sz, CV_8UC3); + for (cv::Point image_pt : in_contours[opt_contour_id]){ + refined_obj_contour_mat.at(image_pt) = cv::Vec3b(255,255,255); + mat_opt_contour.at(image_pt-bbox_offset) = int_to_bgr(opt_contour_id, in_contours.size()); } for (pcl::index_t ind : clusters_indices[opt_cluster_id].indices){ pcl::PointXYZHSV pt = pcloud_ptr->points[ind]; refined_cloud_ptr->push_back(pt); + cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pt.y, pt.z, -pt.x)); + mat_opt_cluster.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(opt_cluster_id, clusters_indices.size()); } pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_contours.cloud); cv_bridge::CvImagePtr 
refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, refined_obj_contour_mat)); @@ -465,6 +508,21 @@ void BBoxProjectPCloud::bb_pcl_project( refined_objects_pub.objects.push_back(refined_pcl_contours); refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,refined_obj_contour_mat)); max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); + + //show clusters & contours & respective matching (split image) + cv::Mat clust_cont_arr[] = {mat_clusters, mat_contours}; + cv::Mat opt_clust_cont_arr[] = {mat_opt_cluster, mat_opt_contour}; + cv::Mat clust_cont; + cv::Mat opt_clust_cont; + cv::hconcat(clust_cont_arr, 2, clust_cont); + cv::hconcat(opt_clust_cont_arr, 2, opt_clust_cont); + cv::Mat clust_cont_all_arr[] = {clust_cont, opt_clust_cont}; + cv::Mat clust_cont_all; + cv::vconcat(clust_cont_all_arr, 2, clust_cont_all); + cv::Mat upscaled; + cv::resize(clust_cont_all, upscaled, cv::Size(), 5, 5, cv::INTER_CUBIC); + cv::imshow("Clusters & contours & matching", upscaled); + cv::waitKey(); } RCLCPP_INFO(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS"); m_refined_object_pcl_contour_pub->publish(refined_objects_pub); @@ -472,7 +530,7 @@ void BBoxProjectPCloud::bb_pcl_project( all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); - cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC1); + cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC3); try{ for(int i = 0; i #include #include +#include +#include +#include #include #include @@ -31,6 +34,9 @@ #include #include #include "pcl_conversions/pcl_conversions.h" +#include +#include +#include #include "cv_bridge/cv_bridge.h" @@ -91,12 +97,6 @@ class BBoxProjectPCloud : public rclcpp::Node{ const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg); - // The HSV-similarity condition for Conditional Euclidean Clustering - // bool hsv_diff_condition(std::vector weights, double thres, const pcl::PointXYZHSV& p1, const pcl::PointXYZHSV& p2, float sq_dist); - - // The penalty function that compares an HSV color value to an HSV color range - // double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color); - int m_obstacle_id; // for cluster extraction From 05288211063499b9b916d6923b1d23fc8cbdbe20 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 6 Dec 2024 02:09:04 -0500 Subject: [PATCH 18/35] Debugging, fixed minor mistake, doesn't crash now, need to check the formulas for the choosing & matching metrics --- .../bbox_project_pcloud.cpp | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index b8315961..9b8d3075 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -164,7 +164,7 @@ void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const 
all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg) { - RCLCPP_INFO(this->get_logger(), "GOT DATA"); + RCLPP_DEBUG(this->get_logger(), "GOT DATA"); // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) m_pc_cam_tf = get_tf(in_img_msg->header.frame_id, in_cloud_msg->header.frame_id); @@ -238,14 +238,14 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_DEBUG(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); max_len = std::max(max_len, (int)obj_cloud_ptr->size()); } - RCLCPP_INFO(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); + RCLPP_DEBUG(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); // pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); - RCLCPP_INFO(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); + RCLPP_DEBUG(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); // RCLCPP_DEBUG(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS (ALTHOUGH THAT'S ONLY FOR RVIZ SO WE DON'T CARE) @@ -281,21 +281,21 @@ void BBoxProjectPCloud::bb_pcl_project( // } //image & bbox relation & adjustment (make sure in-bounds) - RCLCPP_INFO(this->get_logger(), "BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", bbox.min_x, bbox.min_y, bbox.max_x+1, bbox.max_y+1, cv_hsv.cols, cv_hsv.rows); + RCLCPP_DEBUG(this->get_logger(), "BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", bbox.min_x, bbox.min_y, bbox.max_x+1, bbox.max_y+1, cv_hsv.cols, cv_hsv.rows); if(bbox.min_x > bbox.max_x || bbox.min_y > bbox.max_y) continue; //make sure bbox coords are inside image int adj_min_x = std::max((int)bbox.min_x,0); int adj_max_x = std::min((int)bbox.max_x+1, cv_hsv.cols); int adj_min_y = std::max((int)bbox.min_y,0); int adj_max_y = std::min((int)bbox.max_y+1, cv_hsv.rows); - RCLCPP_INFO(this->get_logger(), "ADJUSTED BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", adj_min_x, adj_min_y, adj_max_x, adj_max_y, cv_hsv.cols, cv_hsv.rows); + RCLCPP_DEBUG(this->get_logger(), "ADJUSTED BBOX COORDS: (%d, %d), (%d, %d) in img of sz %d x %d", adj_min_x, adj_min_y, adj_max_x, adj_max_y, cv_hsv.cols, cv_hsv.rows); cv::Mat cv_hsv_copy = cv_hsv.clone(); cv::Mat hsv_img(cv_hsv, cv::Range(adj_min_y, adj_max_y), cv::Range(adj_min_x, adj_max_x)); cv::Size img_sz; cv::Point bbox_offset; hsv_img.locateROI(img_sz, bbox_offset); - RCLCPP_INFO(this->get_logger(), "IMAGE SIZE: (%d, %d)", img_sz.height, img_sz.width); - RCLCPP_INFO(this->get_logger(), "BBOX OFFSET: (%d, %d)", bbox_offset.x, bbox_offset.y); + RCLCPP_DEBUG(this->get_logger(), "IMAGE SIZE: (%d, %d)", img_sz.height, img_sz.width); + RCLCPP_DEBUG(this->get_logger(), "BBOX OFFSET: (%d, %d)", bbox_offset.x, bbox_offset.y); //to display points inside contours, and the clusters projected (debugging) cv::Mat cv_img_copy = cv_ptr->image.clone(); @@ -351,9 +351,9 @@ void 
BBoxProjectPCloud::bb_pcl_project( // color segmentation using the color label std::vector lims = label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]]; int h_min=lims[0], h_max=lims[1], s_min=lims[2], s_max=lims[3], v_min=lims[4], v_max=lims[5]; - RCLCPP_INFO(this->get_logger(), "COLOR: %s, & SEGMENTATION COLOR LIMITS-> H: %d - %d, S: %d - %d, V: %d - %d", label_color_map[bbox.label].c_str(), h_min, h_max, s_min, s_max, v_min, v_max); + RCLCPP_DEBUG(this->get_logger(), "COLOR: %s, & SEGMENTATION COLOR LIMITS-> H: %d - %d, S: %d - %d, V: %d - %d", label_color_map[bbox.label].c_str(), h_min, h_max, s_min, s_max, v_min, v_max); cv::Scalar bbox_mean_hsv = cv::mean(hsv_img); - RCLCPP_INFO(this->get_logger(), "MEAN BBOX IMAGE COLOR: (%lf, %lf, %lf)", bbox_mean_hsv[0], bbox_mean_hsv[1], bbox_mean_hsv[2]); + RCLCPP_DEBUG(this->get_logger(), "MEAN BBOX IMAGE COLOR: (%lf, %lf, %lf)", bbox_mean_hsv[0], bbox_mean_hsv[1], bbox_mean_hsv[2]); cv::Mat mask; cv::inRange(hsv_img, cv::Scalar(h_min, s_min, v_min), cv::Scalar(h_max, s_max, v_max), mask); //show mask @@ -489,7 +489,7 @@ void BBoxProjectPCloud::bb_pcl_project( auto refined_pcl_contours = all_seaing_interfaces::msg::LabeledObjectPointCloud(); refined_pcl_contours.label = bbox.label; - pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud); + pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud()); refined_cloud_ptr->header = pcloud_ptr->header; cv::Mat refined_obj_contour_mat = cv::Mat::zeros(img_sz, CV_8UC3); for (cv::Point image_pt : in_contours[opt_contour_id]){ @@ -503,7 +503,7 @@ void BBoxProjectPCloud::bb_pcl_project( mat_opt_cluster.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(opt_cluster_id, clusters_indices.size()); } pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_contours.cloud); - cv_bridge::CvImagePtr refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, refined_obj_contour_mat)); + cv_bridge::CvImagePtr refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, refined_obj_contour_mat)); refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); refined_objects_pub.objects.push_back(refined_pcl_contours); refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,refined_obj_contour_mat)); @@ -524,28 +524,35 @@ void BBoxProjectPCloud::bb_pcl_project( cv::imshow("Clusters & contours & matching", upscaled); cv::waitKey(); } - RCLCPP_INFO(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS"); + RCLPP_DEBUG(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS & CONTOURS"); m_refined_object_pcl_contour_pub->publish(refined_objects_pub); - pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud); + RCLPP_DEBUG(this->get_logger(), "PUBLISHED REFINED OBJECT POINT CLOUDS & CONTOURS"); + pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud()); all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels - all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); + all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)refined_cloud_contour_vec.size()); cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC3); try{ for(int i = 0; iget_logger(), "BBOX %d/%d", i, refined_cloud_contour_vec.size()); for(int j = 0; 
jat(j,i) = obj_cloud_vec[i][j]; + all_obj_refined_pcls_ptr->at(j,i) = refined_cloud_contour_vec[i].first[j]; + RCLPP_DEBUG(this->get_logger(), "CLUSTER %d/%d", j, refined_cloud_contour_vec[i].first.size()); } + RCLPP_DEBUG(this->get_logger(), "BEFORE CONTOUR ADDITION"); all_obj_refined_contours += refined_cloud_contour_vec[i].second; + RCLPP_DEBUG(this->get_logger(), "AFTER CONTOUR ADDITION"); } }catch(std::exception &ex){ RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); } + RCLPP_DEBUG(this->get_logger(), "STORED CONTOURS TO BE PUBLISHED"); auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_refined_pcls_ptr, obj_refined_pcls_msg); m_refined_object_pcl_viz_pub->publish(obj_refined_pcls_msg); cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, all_obj_refined_contours)); m_refined_object_contour_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); + RCLPP_DEBUG(this->get_logger(), "SENT OBJECT POINT CLOUDS FOR VISUALIZATION"); } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, From 14d224000526f8a93f52f63f065ca17fad9be8ff Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 8 Dec 2024 17:21:15 -0500 Subject: [PATCH 19/35] Debugging, having issues retrieving & converting colors from image (and those projected to cluster points) --- .../bbox_project_pcloud.cpp | 100 +++++++++++++----- 1 file changed, 74 insertions(+), 26 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 9b8d3075..3ff8b7a6 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -164,7 +164,7 @@ void BBoxProjectPCloud::bb_pcl_project( const sensor_msgs::msg::Image::ConstSharedPtr &in_img_msg, const sensor_msgs::msg::PointCloud2::ConstSharedPtr &in_cloud_msg, const all_seaing_interfaces::msg::LabeledBoundingBox2DArray::ConstSharedPtr &in_bbox_msg) { - RCLPP_DEBUG(this->get_logger(), "GOT DATA"); + RCLCPP_DEBUG(this->get_logger(), "GOT DATA"); // LIDAR -> Camera transform (useful for projecting the camera bboxes onto the point cloud, have the origin on the camera frame) if (!m_pc_cam_tf_ok) m_pc_cam_tf = get_tf(in_img_msg->header.frame_id, in_cloud_msg->header.frame_id); @@ -203,7 +203,10 @@ void BBoxProjectPCloud::bb_pcl_project( // cv::imshow("Received Image", cv_ptr->image); // cv::waitKey(); cv::Mat cv_hsv; + cv::waitKey(); cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); + //show hsv (false color) image + // cv::imshow("HSV Image", cv_hsv); std::vector::Ptr>> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); @@ -223,9 +226,9 @@ void BBoxProjectPCloud::bb_pcl_project( if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ cv::Vec3b hsv = cv_hsv.at(xy_rect); // cv::Vec3b bgr = cv_ptr->image.at(xy_rect); - obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, 2*hsv[0], ((float)hsv[1])/255.0, ((float)hsv[2])/255.0)); + obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, ((float)hsv[0])*2, ((float)hsv[1])/((float)255.0), 
((float)hsv[2])/((float)255.0))); // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, bgr[2], bgr[1], bgr[0])); - RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); + RCLCPP_INFO(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); } } @@ -238,14 +241,14 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_DEBUG(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); max_len = std::max(max_len, (int)obj_cloud_ptr->size()); } - RCLPP_DEBUG(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); + RCLCPP_DEBUG(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); // pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); - RCLPP_DEBUG(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); + RCLCPP_DEBUG(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); // RCLCPP_DEBUG(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS (ALTHOUGH THAT'S ONLY FOR RVIZ SO WE DON'T CARE) @@ -397,10 +400,7 @@ void BBoxProjectPCloud::bb_pcl_project( std::pair, std::pair> contour_qts; for(cv::Point image_pt : in_contour){ cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); - //transform to PCL HSV - image_pt_hsv[0]*=2; - image_pt_hsv[1]/=255.0; - image_pt_hsv[2]/=255.0; + RCLCPP_INFO(this->get_logger(), "IMAGE POINT COLOR AT POINT (%d, %d) (size: %d, %d): (%lf, %lf, %lf)", image_pt.x, image_pt.y, cv_hsv.cols, cv_hsv.rows, image_pt_hsv[0], image_pt_hsv[1], image_pt_hsv[2]); cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); contour_pts.push_back(std::make_pair(image_pt_xy, image_pt_hsv)); //store sums @@ -425,7 +425,10 @@ void BBoxProjectPCloud::bb_pcl_project( std::vector> cluster_pts; std::pair, std::pair> cluster_qts; for(pcl::index_t ind : cluster.indices){ - cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + RCLCPP_INFO(this->get_logger(), "INITIAL CLUSTER POINT COLOR: (%lf, %lf, %lf)", pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + //transform to OpenCV HSV + cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h/2, pcloud_ptr->points[ind].s*((float)255.0), pcloud_ptr->points[ind].v*((float)255.0)); + RCLCPP_INFO(this->get_logger(), "CONVERTED CLUSTER POINT COLOR: (%lf, %lf, %lf)", cloud_pt_hsv[0], cloud_pt_hsv[1], cloud_pt_hsv[2]); cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); cluster_pts.push_back(std::make_pair(cloud_pt_xy, cloud_pt_hsv)); //store sums @@ -460,18 +463,63 @@ void 
BBoxProjectPCloud::bb_pcl_project( double cluster_size = clusters_pts[cluster].size(); std::pair, std::pair> cluster_qts = clusters_qts[cluster]; - //(sum(x_i^2+x_j^2) -2*sum(x_i*x_j) +sum(y_i^2+y_j^2) -2*sum(y_i*y_j)) /(cluster_size*contour_size) - double contour_cluster_dist_rms = sqrt((contour_qts.second.first.x+cluster_qts.second.first.x\ + //sum(cluster_size*x_i^2+contour_size*x_j^2) -2*sum(x_i*x_j) +sum(cluster_size*y_i^2+contour_size*y_j^2) -2*sum(y_i*y_j)) + double fast_contour_cluster_sq_dist_sum = cluster_size*contour_qts.second.first.x+contour_size*cluster_qts.second.first.x\ - 2*contour_qts.first.first.x*cluster_qts.first.first.x\ - + contour_qts.second.first.y+cluster_qts.second.first.y\ - - 2*contour_qts.first.first.x*cluster_qts.first.first.x)\ - /(cluster_size*contour_size)); + + cluster_size*contour_qts.second.first.y+contour_size*cluster_qts.second.first.y\ + - 2*contour_qts.first.first.y*cluster_qts.first.first.y; + double contour_cluster_dist_rms = sqrt(fast_contour_cluster_sq_dist_sum/(cluster_size*contour_size)); + + // RCLCPP_INFO(this->get_logger(), "FAST SQUARED DIST SUM: cluster_size (%lf)*sum(x_i^2) (%lf) +contour_size (%lf)*sum(x_j^2) (%lf) -2*sum(x_i) (%lf)*sum(x_j) (%lf) +cluster_size (%lf)*sum(y_i^2) (%lf)+contour_size (%lf)*sum(y_j^2) (%lf) -2*sum(y_i) (%lf)* sum(y_j) (%lf) = %lf", cluster_size, contour_qts.second.first.x, contour_size, cluster_qts.second.first.x, contour_qts.first.first.x, cluster_qts.first.first.x, cluster_size, contour_qts.second.first.y, contour_size, cluster_qts.second.first.y, contour_qts.first.first.y, cluster_qts.first.first.y, fast_contour_cluster_sq_dist_sum); + + // //debugging, brute force + // double sq_dist_sum = 0; + // for(cv::Point image_pt : in_contours[contour]){ + // for(pcl::index_t ind : clusters_indices[cluster].indices){ + // cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); + // cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); + + // cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + // cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); + + // sq_dist_sum += pow(cv::norm(cloud_pt_xy - image_pt_xy),2); + // } + // } + + // RCLCPP_INFO(this->get_logger(), "BRUTE FORCE SQUARED DIST SUM: %lf", sq_dist_sum); cv::Vec3b avg_contour_col = (1/contour_size)*contour_qts.first.second; - double cluster_dh_rms = sqrt((cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+avg_contour_col[0]*avg_contour_col[0])/cluster_size);//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+m_h^2)/cluster_size - double cluster_ds_rms = sqrt((cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+avg_contour_col[1]*avg_contour_col[1])/cluster_size);//same for s - double cluster_dv_rms = sqrt((cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+avg_contour_col[2]*avg_contour_col[2])/cluster_size);//same for v + double fast_cluster_sq_dh_sum = cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+avg_contour_col[0]*avg_contour_col[0];//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+m_h^2) + double fast_cluster_sq_ds_sum = cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+avg_contour_col[1]*avg_contour_col[1];//same for s + double fast_cluster_sq_dv_sum = cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+avg_contour_col[2]*avg_contour_col[2];//same for v + + 
double cluster_dh_rms = sqrt(fast_cluster_sq_dh_sum/cluster_size); + double cluster_ds_rms = sqrt(fast_cluster_sq_ds_sum/cluster_size); + double cluster_dv_rms = sqrt(fast_cluster_sq_dv_sum/cluster_size); + + RCLCPP_INFO(this->get_logger(), "FAST SQUARED DH SUM: sum(h_i^2) (%lf)-2*sum(h_i) (%lf)*m_h (%lf)+m_h^2 (%lf)) = %lf", cluster_qts.second.second[0], cluster_qts.first.second[0], avg_contour_col[0], avg_contour_col[0]*avg_contour_col[0], fast_cluster_sq_dh_sum); + + //debugging, brute force + double sq_dh_sum = 0; + double avg_contour_h = 0; + for(cv::Point image_pt : in_contours[contour]){ + cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); + cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); + + avg_contour_h += image_pt_hsv[0]/contour_size; + } + for(pcl::index_t ind : clusters_indices[cluster].indices){ + cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); + //transform to OpenCV HSV + cloud_pt_hsv[0]/=2; + cloud_pt_hsv[1]*=255; + cloud_pt_hsv[2]*=255; + sq_dh_sum += pow(cloud_pt_hsv[0] - avg_contour_h,2); + } + + RCLCPP_INFO(this->get_logger(), "BRUTE FORCE SQUARE DH SUM: %lf, AVERAGE CONTOUR H: %lf", sq_dh_sum, avg_contour_h); double pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_rms*contour_cluster_dist_rms\ + m_cluster_contour_color_weights[0]*cluster_dh_rms*cluster_dh_rms\ @@ -524,9 +572,9 @@ void BBoxProjectPCloud::bb_pcl_project( cv::imshow("Clusters & contours & matching", upscaled); cv::waitKey(); } - RCLPP_DEBUG(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS & CONTOURS"); + RCLCPP_DEBUG(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS & CONTOURS"); m_refined_object_pcl_contour_pub->publish(refined_objects_pub); - RCLPP_DEBUG(this->get_logger(), "PUBLISHED REFINED OBJECT POINT CLOUDS & CONTOURS"); + RCLCPP_DEBUG(this->get_logger(), "PUBLISHED REFINED OBJECT POINT CLOUDS & CONTOURS"); pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud()); all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels @@ -534,25 +582,25 @@ void BBoxProjectPCloud::bb_pcl_project( cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC3); try{ for(int i = 0; iget_logger(), "BBOX %d/%d", i, refined_cloud_contour_vec.size()); + RCLCPP_DEBUG(this->get_logger(), "BBOX %d/%d", i, refined_cloud_contour_vec.size()); for(int j = 0; jat(j,i) = refined_cloud_contour_vec[i].first[j]; - RCLPP_DEBUG(this->get_logger(), "CLUSTER %d/%d", j, refined_cloud_contour_vec[i].first.size()); + RCLCPP_DEBUG(this->get_logger(), "CLUSTER %d/%d", j, refined_cloud_contour_vec[i].first.size()); } - RCLPP_DEBUG(this->get_logger(), "BEFORE CONTOUR ADDITION"); + RCLCPP_DEBUG(this->get_logger(), "BEFORE CONTOUR ADDITION"); all_obj_refined_contours += refined_cloud_contour_vec[i].second; - RCLPP_DEBUG(this->get_logger(), "AFTER CONTOUR ADDITION"); + RCLCPP_DEBUG(this->get_logger(), "AFTER CONTOUR ADDITION"); } }catch(std::exception &ex){ RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); } - RCLPP_DEBUG(this->get_logger(), "STORED CONTOURS TO BE PUBLISHED"); + RCLCPP_DEBUG(this->get_logger(), "STORED CONTOURS TO BE PUBLISHED"); auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_refined_pcls_ptr, obj_refined_pcls_msg); 
m_refined_object_pcl_viz_pub->publish(obj_refined_pcls_msg); cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, all_obj_refined_contours)); m_refined_object_contour_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); - RCLPP_DEBUG(this->get_logger(), "SENT OBJECT POINT CLOUDS FOR VISUALIZATION"); + RCLCPP_DEBUG(this->get_logger(), "SENT OBJECT POINT CLOUDS FOR VISUALIZATION"); } geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, From 75eb1a2a752b5e0423f62f7565879281cd8cc193 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 10 Dec 2024 22:48:24 -0500 Subject: [PATCH 20/35] IT FINALLY WORKS (seems to work good, only includes points that are in the actual object, but more testing/tuning would be good) --- .../bbox_project_pcloud.cpp | 164 +++++++++--------- 1 file changed, 84 insertions(+), 80 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 3ff8b7a6..cc6f4762 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -148,7 +148,7 @@ double line_range_penalty(int a, int b, int x){ } // The penalty function that compares an HSV color value to an HSV color range -double color_range_penalty(std::vector weights, std::vector color_range, cv::Vec3b pt_color){ +double color_range_penalty(std::vector weights, std::vector color_range, std::vector pt_color){ double h_min=color_range[0], h_max=color_range[1], s_min=color_range[2], s_max=color_range[3], v_min=color_range[4], v_max=color_range[5]; return weights[0]*line_range_penalty(h_min, h_max, pt_color[0]) + weights[1]*line_range_penalty(s_min, s_max, pt_color[1]) + weights[2]*line_range_penalty(v_min, v_max, pt_color[2]); } @@ -203,10 +203,10 @@ void BBoxProjectPCloud::bb_pcl_project( // cv::imshow("Received Image", cv_ptr->image); // cv::waitKey(); cv::Mat cv_hsv; - cv::waitKey(); cv::cvtColor(cv_ptr->image, cv_hsv, cv::COLOR_BGR2HSV); //show hsv (false color) image // cv::imshow("HSV Image", cv_hsv); + // cv::waitKey(); std::vector::Ptr>> bbox_pcloud_objects; for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); @@ -224,11 +224,12 @@ void BBoxProjectPCloud::bb_pcl_project( double bbox_margin = this->get_parameter("bbox_object_margin").as_double(); // Check if point is in bbox if(xy_rect.x >= bbox.min_x-bbox_margin && xy_rect.x <= bbox.max_x+bbox_margin && xy_rect.y >= bbox.min_y-bbox_margin && xy_rect.y <= bbox.max_y+bbox_margin){ - cv::Vec3b hsv = cv_hsv.at(xy_rect); + cv::Vec3b hsv_vec3b = cv_hsv.at(xy_rect); + std::vector hsv = {(long long)hsv_vec3b[0], (long long)hsv_vec3b[1], (long long)hsv_vec3b[2]}; // cv::Vec3b bgr = cv_ptr->image.at(xy_rect); obj_cloud_ptr->push_back(pcl::PointXYZHSV(point_tf.x, point_tf.y, point_tf.z, ((float)hsv[0])*2, ((float)hsv[1])/((float)255.0), ((float)hsv[2])/((float)255.0))); // obj_cloud_ptr->push_back(pcl::PointXYZRGB(point_tf.x, point_tf.y, point_tf.z, bgr[2], bgr[1], bgr[0])); - RCLCPP_INFO(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, hsv[0], hsv[1], hsv[2]); + RCLCPP_DEBUG(this->get_logger(), "SELECTED HSV POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, 
xy_rect.y, hsv[0], hsv[1], hsv[2]); // RCLCPP_DEBUG(this->get_logger(), "SELECTED RGB POINT PROJECTED ONTO IMAGE: (%lf, %lf) -> (%d, %d, %d)", xy_rect.x, xy_rect.y, bgr[2], bgr[1], bgr[0]); } } @@ -270,7 +271,7 @@ void BBoxProjectPCloud::bb_pcl_project( std::vector, cv::Mat>> refined_cloud_contour_vec; int max_refined_len = 0; - RCLCPP_INFO(this->get_logger(), "# OF FOUND BBOXES: %d", bbox_pcloud_objects.size()); + RCLCPP_DEBUG(this->get_logger(), "# OF FOUND BBOXES: %d", bbox_pcloud_objects.size()); for(auto bbox_pcloud_pair : bbox_pcloud_objects){ all_seaing_interfaces::msg::LabeledBoundingBox2D bbox = bbox_pcloud_pair.first; pcl::PointCloud::Ptr pcloud_ptr = bbox_pcloud_pair.second; @@ -335,10 +336,10 @@ void BBoxProjectPCloud::bb_pcl_project( std::function cond_func = std::bind(&hsv_diff_condition, m_clustering_color_weights, m_clustering_color_thres, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3); cec.setConditionFunction(cond_func); cec.segment(clusters_indices); - RCLCPP_INFO(this->get_logger(), "# OF EXTRACTED CLUSTERS: %d", clusters_indices.size()); + RCLCPP_DEBUG(this->get_logger(), "# OF EXTRACTED CLUSTERS: %d", clusters_indices.size()); int clust_id = 0; for(auto ind_set : clusters_indices){ - RCLCPP_INFO(this->get_logger(), "SIZE OF CLUSTER %d: %d", clust_id, ind_set.indices.size()); + RCLCPP_DEBUG(this->get_logger(), "SIZE OF CLUSTER %d: %d", clust_id, ind_set.indices.size()); for(pcl::index_t ind : ind_set.indices){ cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); mat_clusters.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(clust_id, clusters_indices.size()); @@ -382,10 +383,10 @@ void BBoxProjectPCloud::bb_pcl_project( } in_contours.push_back(in_contour); } - RCLCPP_INFO(this->get_logger(), "# OF FOUND CONTOURS: %d", in_contours.size()); + RCLCPP_DEBUG(this->get_logger(), "# OF FOUND CONTOURS: %d", in_contours.size()); // Convert the contour points to fit the original image (using image_sz and bbox_offset -> just bbox_offset+pt_coords) to be able to use it with the LiDAR (projected) points for(int ctr = 0; ctr < in_contours.size(); ctr++){ - RCLCPP_INFO(this->get_logger(), "SIZE OF CONTOUR %d: %d", ctr, in_contours[ctr].size()); + RCLCPP_DEBUG(this->get_logger(), "SIZE OF CONTOUR %d: %d", ctr, in_contours[ctr].size()); for(int pt=0; pt < in_contours[ctr].size(); pt++){ in_contours[ctr][pt]+=bbox_offset; mat_contours.at(in_contours[ctr][pt]-bbox_offset) = int_to_bgr(ctr, in_contours.size()); @@ -393,16 +394,19 @@ void BBoxProjectPCloud::bb_pcl_project( } if(in_contours.empty() || clusters_indices.empty()) continue; // Convert both the pcloud and the contours to a vector of pair and compute sum and sum of squares (can be used to compute all the needed metrics) - std::vector>> contours_pts; - std::vector, std::pair>> contours_qts; + std::vector>>> contours_pts; + std::vector>, std::pair>>> contours_qts; for (std::vector in_contour : in_contours){// point.x, point.y the coords - std::vector> contour_pts; - std::pair, std::pair> contour_qts; + std::vector>> contour_pts; + std::pair>, std::pair>> contour_qts; + contour_qts.first.second = contour_qts.second.second = {0,0,0}; for(cv::Point image_pt : in_contour){ - cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); - RCLCPP_INFO(this->get_logger(), "IMAGE POINT COLOR AT POINT (%d, %d) (size: %d, %d): (%lf, %lf, %lf)", image_pt.x, image_pt.y, cv_hsv.cols, cv_hsv.rows, image_pt_hsv[0], image_pt_hsv[1], 
image_pt_hsv[2]); + cv::Vec3b image_pt_hsv_vec3b = cv_hsv.at(image_pt); + std::vector image_pt_hsv = {(long long)image_pt_hsv_vec3b[0], (long long)image_pt_hsv_vec3b[1], (long long)image_pt_hsv_vec3b[2]}; + RCLCPP_DEBUG(this->get_logger(), "IMAGE POINT COLOR AT POINT (%d, %d) (size: %d, %d): (%d, %d, %d)", image_pt.x, image_pt.y, cv_hsv.cols, cv_hsv.rows, image_pt_hsv[0], image_pt_hsv[1], image_pt_hsv[2]); cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); contour_pts.push_back(std::make_pair(image_pt_xy, image_pt_hsv)); + RCLCPP_DEBUG(this->get_logger(), "IMAGE H SUM/SQUARED BEFORE POINT UPDATE: %lld/%lld", contour_qts.first.second[0], contour_qts.second.second[0]); //store sums contour_qts.first.first.x+=image_pt_xy.x; contour_qts.first.first.y+=image_pt_xy.y; @@ -415,20 +419,22 @@ void BBoxProjectPCloud::bb_pcl_project( contour_qts.second.second[0]+=image_pt_hsv[0]*image_pt_hsv[0]; contour_qts.second.second[1]+=image_pt_hsv[1]*image_pt_hsv[1]; contour_qts.second.second[2]+=image_pt_hsv[2]*image_pt_hsv[2]; + RCLCPP_DEBUG(this->get_logger(), "IMAGE H SUM/SQUARED AFTER POINT UPDATE: %lld/%lld", contour_qts.first.second[0], contour_qts.second.second[0]); } contours_pts.push_back(contour_pts); contours_qts.push_back(contour_qts); } - std::vector>> clusters_pts; - std::vector, std::pair>> clusters_qts; + std::vector>>> clusters_pts; + std::vector>, std::pair>>> clusters_qts; for (pcl::PointIndices cluster : clusters_indices){// vector is cluster.indices - std::vector> cluster_pts; - std::pair, std::pair> cluster_qts; + std::vector>> cluster_pts; + std::pair>, std::pair>> cluster_qts; + cluster_qts.first.second = cluster_qts.second.second = {0,0,0}; for(pcl::index_t ind : cluster.indices){ - RCLCPP_INFO(this->get_logger(), "INITIAL CLUSTER POINT COLOR: (%lf, %lf, %lf)", pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); + RCLCPP_DEBUG(this->get_logger(), "INITIAL CLUSTER POINT COLOR: (%lf, %lf, %lf)", pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); //transform to OpenCV HSV - cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h/2, pcloud_ptr->points[ind].s*((float)255.0), pcloud_ptr->points[ind].v*((float)255.0)); - RCLCPP_INFO(this->get_logger(), "CONVERTED CLUSTER POINT COLOR: (%lf, %lf, %lf)", cloud_pt_hsv[0], cloud_pt_hsv[1], cloud_pt_hsv[2]); + std::vector cloud_pt_hsv = {pcloud_ptr->points[ind].h/2, pcloud_ptr->points[ind].s*((float)255.0), pcloud_ptr->points[ind].v*((float)255.0)}; + RCLCPP_DEBUG(this->get_logger(), "CONVERTED CLUSTER POINT COLOR: (%d, %d, %d)", cloud_pt_hsv[0], cloud_pt_hsv[1], cloud_pt_hsv[2]); cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); cluster_pts.push_back(std::make_pair(cloud_pt_xy, cloud_pt_hsv)); //store sums @@ -449,82 +455,80 @@ void BBoxProjectPCloud::bb_pcl_project( } // GO THROUGH ALL THE CLUSTER/CONTOUR PAIRS AND FIND THE BEST ONE BASED ON THE OPTIMALITY METRIC WITH THE WEIGHTS - double min_pair_cost = -1; + long long min_pair_cost = -1; int opt_contour_id = -1, opt_cluster_id = -1; for (int contour = 0; contour < contours_pts.size(); contour++){ - double contour_size = contours_pts[contour].size(); - std::pair, std::pair> contour_qts = contours_qts[contour]; + long long contour_size = contours_pts[contour].size(); + std::pair>, std::pair>> contour_qts = contours_qts[contour]; - double contour_cost = 0; - for (std::pair contour_pt : contours_pts[contour]){ - 
contour_cost += color_range_penalty(m_contour_detection_color_weights, label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]], contour_pt.second)/contour_size; + long long contour_cost = 0; + for (std::pair> contour_pt : contours_pts[contour]){ + contour_cost += (long long)color_range_penalty(m_contour_detection_color_weights, label_color_map[bbox.label]=="red"?contour_matching_color_range_map["red2"]:contour_matching_color_range_map[label_color_map[bbox.label]], contour_pt.second)/contour_size; } for (int cluster = 0; cluster < clusters_pts.size(); cluster++){ - double cluster_size = clusters_pts[cluster].size(); - std::pair, std::pair> cluster_qts = clusters_qts[cluster]; + long long cluster_size = clusters_pts[cluster].size(); + std::pair>, std::pair>> cluster_qts = clusters_qts[cluster]; //sum(cluster_size*x_i^2+contour_size*x_j^2) -2*sum(x_i*x_j) +sum(cluster_size*y_i^2+contour_size*y_j^2) -2*sum(y_i*y_j)) - double fast_contour_cluster_sq_dist_sum = cluster_size*contour_qts.second.first.x+contour_size*cluster_qts.second.first.x\ + long long fast_contour_cluster_sq_dist_sum = cluster_size*contour_qts.second.first.x+contour_size*cluster_qts.second.first.x\ - 2*contour_qts.first.first.x*cluster_qts.first.first.x\ + cluster_size*contour_qts.second.first.y+contour_size*cluster_qts.second.first.y\ - 2*contour_qts.first.first.y*cluster_qts.first.first.y; - double contour_cluster_dist_rms = sqrt(fast_contour_cluster_sq_dist_sum/(cluster_size*contour_size)); + long long contour_cluster_dist_ms = fast_contour_cluster_sq_dist_sum/(cluster_size*contour_size); - // RCLCPP_INFO(this->get_logger(), "FAST SQUARED DIST SUM: cluster_size (%lf)*sum(x_i^2) (%lf) +contour_size (%lf)*sum(x_j^2) (%lf) -2*sum(x_i) (%lf)*sum(x_j) (%lf) +cluster_size (%lf)*sum(y_i^2) (%lf)+contour_size (%lf)*sum(y_j^2) (%lf) -2*sum(y_i) (%lf)* sum(y_j) (%lf) = %lf", cluster_size, contour_qts.second.first.x, contour_size, cluster_qts.second.first.x, contour_qts.first.first.x, cluster_qts.first.first.x, cluster_size, contour_qts.second.first.y, contour_size, cluster_qts.second.first.y, contour_qts.first.first.y, cluster_qts.first.first.y, fast_contour_cluster_sq_dist_sum); + // RCLCPP_DEBUG(this->get_logger(), "FAST SQUARED DIST SUM: cluster_size (%lf)*sum(x_i^2) (%lf) +contour_size (%lf)*sum(x_j^2) (%lf) -2*sum(x_i) (%lf)*sum(x_j) (%lf) +cluster_size (%lf)*sum(y_i^2) (%lf)+contour_size (%lf)*sum(y_j^2) (%lf) -2*sum(y_i) (%lf)* sum(y_j) (%lf) = %lf", cluster_size, contour_qts.second.first.x, contour_size, cluster_qts.second.first.x, contour_qts.first.first.x, cluster_qts.first.first.x, cluster_size, contour_qts.second.first.y, contour_size, cluster_qts.second.first.y, contour_qts.first.first.y, cluster_qts.first.first.y, fast_contour_cluster_sq_dist_sum); // //debugging, brute force - // double sq_dist_sum = 0; + // long long sq_dist_sum = 0; // for(cv::Point image_pt : in_contours[contour]){ // for(pcl::index_t ind : clusters_indices[cluster].indices){ - // cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); // cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); - // cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); // cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); // sq_dist_sum += pow(cv::norm(cloud_pt_xy - image_pt_xy),2); // } // } - // RCLCPP_INFO(this->get_logger(), 
"BRUTE FORCE SQUARED DIST SUM: %lf", sq_dist_sum); + // RCLCPP_DEBUG(this->get_logger(), "BRUTE FORCE SQUARED DIST SUM: %lf", sq_dist_sum); - cv::Vec3b avg_contour_col = (1/contour_size)*contour_qts.first.second; + std::vector avg_contour_col = {contour_qts.first.second[0]/contour_size, contour_qts.first.second[1]/contour_size, contour_qts.first.second[2]/contour_size}; - double fast_cluster_sq_dh_sum = cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+avg_contour_col[0]*avg_contour_col[0];//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+m_h^2) - double fast_cluster_sq_ds_sum = cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+avg_contour_col[1]*avg_contour_col[1];//same for s - double fast_cluster_sq_dv_sum = cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+avg_contour_col[2]*avg_contour_col[2];//same for v + long long fast_cluster_sq_dh_sum = cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+cluster_size*avg_contour_col[0]*avg_contour_col[0];//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+cluster_size*m_h^2) + long long fast_cluster_sq_ds_sum = cluster_qts.second.second[1]-2*cluster_qts.first.second[1]*avg_contour_col[1]+cluster_size*avg_contour_col[1]*avg_contour_col[1];//same for s + long long fast_cluster_sq_dv_sum = cluster_qts.second.second[2]-2*cluster_qts.first.second[2]*avg_contour_col[2]+cluster_size*avg_contour_col[2]*avg_contour_col[2];//same for v - double cluster_dh_rms = sqrt(fast_cluster_sq_dh_sum/cluster_size); - double cluster_ds_rms = sqrt(fast_cluster_sq_ds_sum/cluster_size); - double cluster_dv_rms = sqrt(fast_cluster_sq_dv_sum/cluster_size); + long long cluster_dh_ms = fast_cluster_sq_dh_sum/cluster_size; + long long cluster_ds_ms = fast_cluster_sq_ds_sum/cluster_size; + long long cluster_dv_ms = fast_cluster_sq_dv_sum/cluster_size; - RCLCPP_INFO(this->get_logger(), "FAST SQUARED DH SUM: sum(h_i^2) (%lf)-2*sum(h_i) (%lf)*m_h (%lf)+m_h^2 (%lf)) = %lf", cluster_qts.second.second[0], cluster_qts.first.second[0], avg_contour_col[0], avg_contour_col[0]*avg_contour_col[0], fast_cluster_sq_dh_sum); + // RCLCPP_DEBUG(this->get_logger(), "FAST SQUARED DH SUM: sum(h_i^2) (%lld)-2*sum(h_i) (%lld)*m_h (%lld)+cluster_size(%lld)*m_h^2 (%lld)) = %lld", cluster_qts.second.second[0], cluster_qts.first.second[0], avg_contour_col[0], cluster_size, avg_contour_col[0]*avg_contour_col[0], fast_cluster_sq_dh_sum); //debugging, brute force - double sq_dh_sum = 0; - double avg_contour_h = 0; - for(cv::Point image_pt : in_contours[contour]){ - cv::Vec3b image_pt_hsv = cv_hsv.at(image_pt); - cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); - - avg_contour_h += image_pt_hsv[0]/contour_size; - } - for(pcl::index_t ind : clusters_indices[cluster].indices){ - cv::Vec3b cloud_pt_hsv = cv::Vec3b(pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v); - cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); - //transform to OpenCV HSV - cloud_pt_hsv[0]/=2; - cloud_pt_hsv[1]*=255; - cloud_pt_hsv[2]*=255; - sq_dh_sum += pow(cloud_pt_hsv[0] - avg_contour_h,2); - } + // long long sq_dh_sum = 0; + // long long sum_contour_h = 0; + // for(cv::Point image_pt : in_contours[contour]){ + // cv::Vec3b image_pt_hsv_vec3b = cv_hsv.at(image_pt); + // std::vector image_pt_hsv = {(long long)image_pt_hsv_vec3b[0], (long long)image_pt_hsv_vec3b[1], (long long)image_pt_hsv_vec3b[2]}; - 
RCLCPP_INFO(this->get_logger(), "BRUTE FORCE SQUARE DH SUM: %lf, AVERAGE CONTOUR H: %lf", sq_dh_sum, avg_contour_h); + // sum_contour_h += image_pt_hsv[0]; + // } + // long long avg_contour_h = sum_contour_h/contour_size; + // for(pcl::index_t ind : clusters_indices[cluster].indices){ + // std::vector cloud_pt_hsv = {pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v}; + // //transform to OpenCV HSV + // cloud_pt_hsv[0]/=2; + // cloud_pt_hsv[1]*=255; + // cloud_pt_hsv[2]*=255; + // sq_dh_sum += pow(cloud_pt_hsv[0] - avg_contour_h,2); + // } + + // RCLCPP_DEBUG(this->get_logger(), "BRUTE FORCE SQUARE DH SUM: %lld, AVERAGE CONTOUR H: %lld", sq_dh_sum, avg_contour_h); - double pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_rms*contour_cluster_dist_rms\ - + m_cluster_contour_color_weights[0]*cluster_dh_rms*cluster_dh_rms\ - + m_cluster_contour_color_weights[1]*cluster_ds_rms*cluster_ds_rms\ - + m_cluster_contour_color_weights[2]*cluster_dv_rms*cluster_dv_rms\ + long long pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_ms\ + + m_cluster_contour_color_weights[0]*cluster_dh_ms\ + + m_cluster_contour_color_weights[1]*cluster_ds_ms\ + + m_cluster_contour_color_weights[2]*cluster_dv_ms\ - m_cluster_contour_size_weight*cluster_size*contour_size\ + contour_cost; if (min_pair_cost == -1 || pair_cost < min_pair_cost){ @@ -558,19 +562,19 @@ void BBoxProjectPCloud::bb_pcl_project( max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); //show clusters & contours & respective matching (split image) - cv::Mat clust_cont_arr[] = {mat_clusters, mat_contours}; - cv::Mat opt_clust_cont_arr[] = {mat_opt_cluster, mat_opt_contour}; - cv::Mat clust_cont; - cv::Mat opt_clust_cont; - cv::hconcat(clust_cont_arr, 2, clust_cont); - cv::hconcat(opt_clust_cont_arr, 2, opt_clust_cont); - cv::Mat clust_cont_all_arr[] = {clust_cont, opt_clust_cont}; - cv::Mat clust_cont_all; - cv::vconcat(clust_cont_all_arr, 2, clust_cont_all); - cv::Mat upscaled; - cv::resize(clust_cont_all, upscaled, cv::Size(), 5, 5, cv::INTER_CUBIC); - cv::imshow("Clusters & contours & matching", upscaled); - cv::waitKey(); + // cv::Mat clust_cont_arr[] = {mat_clusters, mat_contours}; + // cv::Mat opt_clust_cont_arr[] = {mat_opt_cluster, mat_opt_contour}; + // cv::Mat clust_cont; + // cv::Mat opt_clust_cont; + // cv::hconcat(clust_cont_arr, 2, clust_cont); + // cv::hconcat(opt_clust_cont_arr, 2, opt_clust_cont); + // cv::Mat clust_cont_all_arr[] = {clust_cont, opt_clust_cont}; + // cv::Mat clust_cont_all; + // cv::vconcat(clust_cont_all_arr, 2, clust_cont_all); + // cv::Mat upscaled; + // cv::resize(clust_cont_all, upscaled, cv::Size(), 5, 5, cv::INTER_CUBIC); + // cv::imshow("Clusters & contours & matching", upscaled); + // cv::waitKey(); } RCLCPP_DEBUG(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS & CONTOURS"); m_refined_object_pcl_contour_pub->publish(refined_objects_pub); From f0a87aa98ca0a2a4e8ee2af459c8713c8b449a11 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 10 Dec 2024 23:18:26 -0500 Subject: [PATCH 21/35] Now the object contour visualization publisher works too --- .../bbox_project_pcloud.cpp | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index cc6f4762..34fba910 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ 
b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -268,7 +268,7 @@ void BBoxProjectPCloud::bb_pcl_project( // REFINE OBJECT POINT CLOUDS auto refined_objects_pub = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); - std::vector, cv::Mat>> refined_cloud_contour_vec; + std::vector, std::vector>> refined_cloud_contour_vec; int max_refined_len = 0; RCLCPP_DEBUG(this->get_logger(), "# OF FOUND BBOXES: %d", bbox_pcloud_objects.size()); @@ -556,9 +556,11 @@ void BBoxProjectPCloud::bb_pcl_project( } pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_contours.cloud); cv_bridge::CvImagePtr refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, refined_obj_contour_mat)); + // cv::imshow("Object contour image to be published:", refined_obj_contour_mat); + // cv::waitKey(); refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); refined_objects_pub.objects.push_back(refined_pcl_contours); - refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,refined_obj_contour_mat)); + refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,in_contours[opt_contour_id])); max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); //show clusters & contours & respective matching (split image) @@ -589,11 +591,10 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_DEBUG(this->get_logger(), "BBOX %d/%d", i, refined_cloud_contour_vec.size()); for(int j = 0; jat(j,i) = refined_cloud_contour_vec[i].first[j]; - RCLCPP_DEBUG(this->get_logger(), "CLUSTER %d/%d", j, refined_cloud_contour_vec[i].first.size()); } - RCLCPP_DEBUG(this->get_logger(), "BEFORE CONTOUR ADDITION"); - all_obj_refined_contours += refined_cloud_contour_vec[i].second; - RCLCPP_DEBUG(this->get_logger(), "AFTER CONTOUR ADDITION"); + for(cv::Point pt : refined_cloud_contour_vec[i].second){ + all_obj_refined_contours.at(pt)=cv::Vec3b(255, 255, 255); + } } }catch(std::exception &ex){ RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); @@ -602,7 +603,9 @@ void BBoxProjectPCloud::bb_pcl_project( auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_refined_pcls_ptr, obj_refined_pcls_msg); m_refined_object_pcl_viz_pub->publish(obj_refined_pcls_msg); - cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC1, all_obj_refined_contours)); + cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, all_obj_refined_contours)); + // cv::imshow("Object contour image to be published:", all_obj_refined_contours); + // cv::waitKey(); m_refined_object_contour_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); RCLCPP_DEBUG(this->get_logger(), "SENT OBJECT POINT CLOUDS FOR VISUALIZATION"); } From 962cb8511a7b76c1081a76f698154f68436ad466 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 10 Dec 2024 23:22:40 -0500 Subject: [PATCH 22/35] Removed some debugging/old stuff --- .../bbox_project_pcloud.cpp | 50 +------------------ 1 file changed, 1 insertion(+), 49 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 34fba910..86b37ba8 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -315,18 +315,9 @@ void 
BBoxProjectPCloud::bb_pcl_project( tree->setInputCloud(pcloud_ptr); std::vector clusters_indices; - // EUCLIDEAN CLUSTERING (DEPRECATED) - // // pcl::EuclideanClusterExtraction ec; - // pcl::EuclideanClusterExtraction ec; - // ec.setClusterTolerance(m_clustering_distance); - // ec.setMinClusterSize(m_obstacle_size_min); - // ec.setMaxClusterSize(m_obstacle_size_max); - // ec.setSearchMethod(tree); - // ec.setInputCloud(pcloud_ptr); - // ec.extract(clusters_indices); - // CONDITIONAL (WITH HSV-BASED CONDITION) EUCLIDEAN CLUSTERING // TODO: USE AN ALGORITHM THAT MAKES SURE THE CONTOUR IS CONTINUOUS (WITHOUT NON-SELECTED POINTS INSIDE IT, AS IT'S PROBABLY THE CASE NOW) + //(though seems to work pretty well as it is now) pcl::ConditionalEuclideanClustering cec; cec.setClusterTolerance(m_clustering_distance); cec.setMinClusterSize(m_obstacle_size_min); @@ -476,22 +467,6 @@ void BBoxProjectPCloud::bb_pcl_project( - 2*contour_qts.first.first.y*cluster_qts.first.first.y; long long contour_cluster_dist_ms = fast_contour_cluster_sq_dist_sum/(cluster_size*contour_size); - // RCLCPP_DEBUG(this->get_logger(), "FAST SQUARED DIST SUM: cluster_size (%lf)*sum(x_i^2) (%lf) +contour_size (%lf)*sum(x_j^2) (%lf) -2*sum(x_i) (%lf)*sum(x_j) (%lf) +cluster_size (%lf)*sum(y_i^2) (%lf)+contour_size (%lf)*sum(y_j^2) (%lf) -2*sum(y_i) (%lf)* sum(y_j) (%lf) = %lf", cluster_size, contour_qts.second.first.x, contour_size, cluster_qts.second.first.x, contour_qts.first.first.x, cluster_qts.first.first.x, cluster_size, contour_qts.second.first.y, contour_size, cluster_qts.second.first.y, contour_qts.first.first.y, cluster_qts.first.first.y, fast_contour_cluster_sq_dist_sum); - - // //debugging, brute force - // long long sq_dist_sum = 0; - // for(cv::Point image_pt : in_contours[contour]){ - // for(pcl::index_t ind : clusters_indices[cluster].indices){ - // cv::Point2d image_pt_xy = cv::Point2d(image_pt.x, image_pt.y); - - // cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pcloud_ptr->points[ind].y, pcloud_ptr->points[ind].z, -pcloud_ptr->points[ind].x)); - - // sq_dist_sum += pow(cv::norm(cloud_pt_xy - image_pt_xy),2); - // } - // } - - // RCLCPP_DEBUG(this->get_logger(), "BRUTE FORCE SQUARED DIST SUM: %lf", sq_dist_sum); - std::vector avg_contour_col = {contour_qts.first.second[0]/contour_size, contour_qts.first.second[1]/contour_size, contour_qts.first.second[2]/contour_size}; long long fast_cluster_sq_dh_sum = cluster_qts.second.second[0]-2*cluster_qts.first.second[0]*avg_contour_col[0]+cluster_size*avg_contour_col[0]*avg_contour_col[0];//sum((h_i-m_h)^2=h_i^2-2*h_i*m_h+cluster_size*m_h^2) @@ -502,29 +477,6 @@ void BBoxProjectPCloud::bb_pcl_project( long long cluster_ds_ms = fast_cluster_sq_ds_sum/cluster_size; long long cluster_dv_ms = fast_cluster_sq_dv_sum/cluster_size; - // RCLCPP_DEBUG(this->get_logger(), "FAST SQUARED DH SUM: sum(h_i^2) (%lld)-2*sum(h_i) (%lld)*m_h (%lld)+cluster_size(%lld)*m_h^2 (%lld)) = %lld", cluster_qts.second.second[0], cluster_qts.first.second[0], avg_contour_col[0], cluster_size, avg_contour_col[0]*avg_contour_col[0], fast_cluster_sq_dh_sum); - - //debugging, brute force - // long long sq_dh_sum = 0; - // long long sum_contour_h = 0; - // for(cv::Point image_pt : in_contours[contour]){ - // cv::Vec3b image_pt_hsv_vec3b = cv_hsv.at(image_pt); - // std::vector image_pt_hsv = {(long long)image_pt_hsv_vec3b[0], (long long)image_pt_hsv_vec3b[1], (long long)image_pt_hsv_vec3b[2]}; - - // sum_contour_h += image_pt_hsv[0]; - // } - // long long avg_contour_h = 
sum_contour_h/contour_size; - // for(pcl::index_t ind : clusters_indices[cluster].indices){ - // std::vector cloud_pt_hsv = {pcloud_ptr->points[ind].h, pcloud_ptr->points[ind].s, pcloud_ptr->points[ind].v}; - // //transform to OpenCV HSV - // cloud_pt_hsv[0]/=2; - // cloud_pt_hsv[1]*=255; - // cloud_pt_hsv[2]*=255; - // sq_dh_sum += pow(cloud_pt_hsv[0] - avg_contour_h,2); - // } - - // RCLCPP_DEBUG(this->get_logger(), "BRUTE FORCE SQUARE DH SUM: %lld, AVERAGE CONTOUR H: %lld", sq_dh_sum, avg_contour_h); - long long pair_cost = m_cluster_contour_distance_weight*contour_cluster_dist_ms\ + m_cluster_contour_color_weights[0]*cluster_dh_ms\ + m_cluster_contour_color_weights[1]*cluster_ds_ms\ From dc59a78e1328c45f04bd0da42634a8db6bb2d2a5 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Thu, 12 Dec 2024 23:54:11 -0500 Subject: [PATCH 23/35] removing useless stuff --- .../bbox_project_pcloud.cpp | 23 ------------------- .../bbox_project_pcloud.hpp | 7 +----- .../launch/bbox_project_pcloud_test.launch.py | 3 --- 3 files changed, 1 insertion(+), 32 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index 86b37ba8..c3311d51 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -15,22 +15,14 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ this->declare_parameter("obstacle_size_min", 20); this->declare_parameter("obstacle_size_max", 100000); this->declare_parameter("clustering_distance", 0.75); - this->declare_parameter("obstacle_seg_thresh", 1.0); - this->declare_parameter("obstacle_drop_thresh", 1.0); - this->declare_parameter("polygon_area_thresh", 100000.0); m_obstacle_size_min = this->get_parameter("obstacle_size_min").as_int(); m_obstacle_size_max = this->get_parameter("obstacle_size_max").as_int(); m_clustering_distance = this->get_parameter("clustering_distance").as_double(); - m_obstacle_seg_thresh = this->get_parameter("obstacle_seg_thresh").as_double(); - m_obstacle_drop_thresh = this->get_parameter("obstacle_drop_thresh").as_double(); - m_polygon_area_thresh = this->get_parameter("polygon_area_thresh").as_double(); // for color segmentation - this->declare_parameter("color_ranges_file", ""); this->declare_parameter("color_label_mappings_file", ""); - color_ranges_file = this->get_parameter("color_ranges_file").as_string(); color_label_mappings_file = this->get_parameter("color_label_mappings_file").as_string(); // for cluster-contour matching & selection @@ -75,21 +67,6 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ } label_yaml.close(); - // get color ranges from yaml - RCLCPP_DEBUG(this->get_logger(), "READING COLOR RANGES"); - std::ifstream ranges_yaml(color_ranges_file); - if (ranges_yaml.is_open()) { - ranges_config_yaml = YAML::Load(ranges_yaml); - for (YAML::const_iterator it = ranges_config_yaml.begin(); it != ranges_config_yaml.end(); ++it) { - color_range_map[it->first.as()] = it->second.as>(); - RCLCPP_DEBUG(this->get_logger(), "%s -> [%d, %d, %d, %d, %d, %d]", it->first.as().c_str(), it->second.as>()[0], it->second.as>()[1], it->second.as>()[2], it->second.as>()[3], it->second.as>()[4], it->second.as>()[5]); - } - } - else { - RCLCPP_ERROR(this->get_logger(), "Failed to open YAML file: %s", color_ranges_file.c_str()); - } - ranges_yaml.close(); - // get cluster-contour matching & selection parameters from 
yaml RCLCPP_DEBUG(this->get_logger(), "READING MATCHING PARAMETERS"); std::ifstream matching_yaml(matching_weights_file); diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 057aadab..7b67084c 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -103,16 +103,11 @@ class BBoxProjectPCloud : public rclcpp::Node{ int m_obstacle_size_min; int m_obstacle_size_max; double m_clustering_distance; - double m_obstacle_seg_thresh; - double m_obstacle_drop_thresh; - double m_polygon_area_thresh; // for color segmentation - std::string color_ranges_file; std::string color_label_mappings_file; - YAML::Node label_config_yaml, ranges_config_yaml; + YAML::Node label_config_yaml; std::map label_color_map; - std::map> color_range_map; // for cluster-contour matching/selection std::string matching_weights_file; diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py index 18362a92..861ae502 100644 --- a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -33,9 +33,6 @@ def generate_launch_description(): {"obstacle_size_min": 2}, {"obstacle_size_max": 60}, {"clustering_distance": 1.0}, - {"obstacle_seg_thresh": 10.0}, - {"obstacle_drop_thresh": 1.0}, - {"polygon_area_thresh": 100000.0}, {"matching_weights_file": matching_weights}, {"contour_matching_color_ranges_file": contour_matching_color_ranges} ] From 4d4826398cb7353d8fc097305d09fcaf2388f14f Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 15 Dec 2024 16:13:23 -0500 Subject: [PATCH 24/35] Changed topic parameters to remappings, and fixed the point glitching by publishing unorganized point clouds (channels don't matter anyways in visualization) --- all_seaing_bringup/launch/sim.launch.py | 14 ++++---- .../bbox_project_pcloud.cpp | 35 +++++++++---------- .../launch/bbox_project_pcloud_test.launch.py | 8 +++-- 3 files changed, 28 insertions(+), 29 deletions(-) diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 37c03850..13f53bd0 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -157,19 +157,19 @@ def generate_launch_description(): bbox_project_pcloud_node = launch_ros.actions.Node( package="all_seaing_perception", executable="bbox_project_pcloud", + output="screen", + remappings=[ + ("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"), + ("camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"), + ("lidar_topic", "point_cloud/filtered") + ], parameters=[ {"bbox_object_margin": 0.0}, - {"camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"}, - {"camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"}, - {"lidar_topic", "point_cloud/filtered"}, {"color_label_mappings_file": color_label_mappings}, {"color_ranges_file": color_ranges}, {"obstacle_size_min": 2}, {"obstacle_size_max": 60}, {"clustering_distance": 1.0}, - {"obstacle_seg_thresh": 10.0}, - {"obstacle_drop_thresh": 1.0}, - {"polygon_area_thresh": 100000.0}, {"matching_weights_file": matching_weights}, {"contour_matching_color_ranges_file": contour_matching_color_ranges} ] @@ -270,7 +270,7 
@@ def generate_launch_description(): # yolov8_node, point_cloud_filter_node, obstacle_detector_node, - # bbox_project_pcloud_node, + bbox_project_pcloud_node, rviz_node, control_mux, controller_server, diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index c3311d51..ba9818ac 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -7,9 +7,6 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ //essential ones this->declare_parameter("bbox_object_margin", 0.0); - this->declare_parameter("lidar_topic", "/wamv/sensors/lidars/lidar_wamv_sensor/points"); - this->declare_parameter("camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"); - this->declare_parameter("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"); // for cluster extraction this->declare_parameter("obstacle_size_min", 20); @@ -34,9 +31,9 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Subscriptions m_image_intrinsics_sub = this->create_subscription( - this->get_parameter("camera_info_topic").as_string(), 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); - m_image_sub.subscribe(this, this->get_parameter("camera_topic").as_string(), rmw_qos_profile_sensor_data); - m_cloud_sub.subscribe(this, this->get_parameter("lidar_topic").as_string(), rmw_qos_profile_sensor_data); + "camera_info_topic", 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); + m_image_sub.subscribe(this, "camera_topic", rmw_qos_profile_sensor_data); + m_cloud_sub.subscribe(this, "lidar_topic", rmw_qos_profile_sensor_data); m_bbox_sub.subscribe(this, "bounding_boxes", rmw_qos_profile_sensor_data); // Send pc msg and img msg to bb_pcl_project @@ -157,7 +154,7 @@ void BBoxProjectPCloud::bb_pcl_project( auto object_pcls = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); std::vector> obj_cloud_vec; - int max_len = 0; + // int max_len = 0; // Just use the same pcloud to image projection, but check if it's within some binding box and assign it to that detection int obj = 0; for (pcl::PointXYZI &point_tf : in_cloud_tf_ptr->points) { @@ -217,7 +214,7 @@ void BBoxProjectPCloud::bb_pcl_project( bbox_pcloud_objects.push_back(std::make_pair(bbox, obj_cloud_ptr)); obj++; RCLCPP_DEBUG(this->get_logger(), "%d POINTS IN OBJECT %d", obj_cloud_ptr->size(), obj); - max_len = std::max(max_len, (int)obj_cloud_ptr->size()); + // max_len = std::max(max_len, (int)obj_cloud_ptr->size()); } RCLCPP_DEBUG(this->get_logger(), "WILL NOW SEND OBJECT POINT CLOUDS"); m_object_pcl_pub->publish(object_pcls); @@ -225,19 +222,18 @@ void BBoxProjectPCloud::bb_pcl_project( // pcl::PointCloud::Ptr all_obj_pcls_ptr(new pcl::PointCloud); all_obj_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels - all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); - RCLCPP_DEBUG(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); + // all_obj_pcls_ptr->resize((pcl::uindex_t)max_len, (pcl::uindex_t)in_bbox_msg->boxes.size()); + // RCLCPP_DEBUG(this->get_logger(), "PUBLISHED PCLOUD DIMENSIONS: height: %d, width: %d", (int)all_obj_pcls_ptr->height, (int)all_obj_pcls_ptr->width); 
// RCLCPP_DEBUG(this->get_logger(), "STORED PCLOUD DIMENSIONS: objects: %d, max_length: %d", (int)obj_cloud_vec.size(), max_len); try{ - //TODO: CHECK IF THE CHANNELS CORRECTLY REPRESENT DIFFERENT OBJECTS (ALTHOUGH THAT'S ONLY FOR RVIZ SO WE DON'T CARE) - //(I.E. HEIGHT & WIDTH ARE CORRECT, OTHERWISE SWAP THEIR ORDER BOTH IN THE RESIZE() AND THE AT() METHODS) for(int i = 0; iat(j,i) = obj_cloud_vec[i][j]; + // all_obj_pcls_ptr->at(j,i) = obj_cloud_vec[i][j]; + all_obj_pcls_ptr->push_back(obj_cloud_vec[i][j]); } } }catch(std::exception &ex){ - RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); + RCLCPP_ERROR(this->get_logger(), "PROJECT BBOX PCLOUD PUBLISHING ERROR: %s", ex.what()); } auto obj_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); @@ -246,7 +242,7 @@ void BBoxProjectPCloud::bb_pcl_project( // REFINE OBJECT POINT CLOUDS auto refined_objects_pub = all_seaing_interfaces::msg::LabeledObjectPointCloudArray(); std::vector, std::vector>> refined_cloud_contour_vec; - int max_refined_len = 0; + // int max_refined_len = 0; RCLCPP_DEBUG(this->get_logger(), "# OF FOUND BBOXES: %d", bbox_pcloud_objects.size()); for(auto bbox_pcloud_pair : bbox_pcloud_objects){ @@ -490,7 +486,7 @@ void BBoxProjectPCloud::bb_pcl_project( refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); refined_objects_pub.objects.push_back(refined_pcl_contours); refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,in_contours[opt_contour_id])); - max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); + // max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); //show clusters & contours & respective matching (split image) // cv::Mat clust_cont_arr[] = {mat_clusters, mat_contours}; @@ -513,20 +509,21 @@ void BBoxProjectPCloud::bb_pcl_project( pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud()); all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; //convert vector of PointCloud to a single PointCloud with channels - all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)refined_cloud_contour_vec.size()); + // all_obj_refined_pcls_ptr->resize((pcl::uindex_t)max_refined_len, (pcl::uindex_t)refined_cloud_contour_vec.size()); cv::Mat all_obj_refined_contours = cv::Mat::zeros(cv_ptr->image.size(), CV_8UC3); try{ for(int i = 0; iget_logger(), "BBOX %d/%d", i, refined_cloud_contour_vec.size()); for(int j = 0; jat(j,i) = refined_cloud_contour_vec[i].first[j]; + // all_obj_refined_pcls_ptr->at(j,i) = refined_cloud_contour_vec[i].first[j]; + all_obj_refined_pcls_ptr->push_back(refined_cloud_contour_vec[i].first[j]); } for(cv::Point pt : refined_cloud_contour_vec[i].second){ all_obj_refined_contours.at(pt)=cv::Vec3b(255, 255, 255); } } }catch(std::exception &ex){ - RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); + RCLCPP_ERROR(this->get_logger(), "REFINED POINT CLOUD PUBLISHING ERROR: %s", ex.what()); } RCLCPP_DEBUG(this->get_logger(), "STORED CONTOURS TO BE PUBLISHED"); auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); diff --git a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py index 861ae502..6227ac76 100644 --- a/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py +++ b/all_seaing_perception/launch/bbox_project_pcloud_test.launch.py @@ -23,11 +23,13 @@ def generate_launch_description(): package="all_seaing_perception", executable="bbox_project_pcloud", 
output="screen", + remappings=[ + ("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"), + ("camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"), + ("lidar_topic", "point_cloud/filtered") + ], parameters=[ {"bbox_object_margin": 0.0}, - {"camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"}, - {"camera_topic", "/wamv/sensors/cameras/front_left_camera_sensor/image_raw"}, - {"lidar_topic", "point_cloud/filtered"}, {"color_label_mappings_file": color_label_mappings}, {"color_ranges_file": color_ranges}, {"obstacle_size_min": 2}, From dca5455d2c7b4dd6882cea39e82531bec367aae4 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sun, 15 Dec 2024 16:34:15 -0500 Subject: [PATCH 25/35] Minor name changes --- .../msg/LabeledObjectPointCloud.msg | 2 +- .../bbox_project_pcloud.cpp | 18 +++++++++--------- .../bbox_project_pcloud.hpp | 4 ++-- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg index 2caae308..30e95e28 100644 --- a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -1,3 +1,3 @@ int8 label sensor_msgs/PointCloud2 cloud -sensor_msgs/Image contour \ No newline at end of file +sensor_msgs/Image segment \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index ba9818ac..d704bca7 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -45,9 +45,9 @@ BBoxProjectPCloud::BBoxProjectPCloud() : Node("bbox_project_pcloud"){ // Publishers m_object_pcl_pub = this->create_publisher("labeled_object_point_clouds", 5); m_object_pcl_viz_pub = this->create_publisher("object_point_clouds_viz", 5); - m_refined_object_pcl_contour_pub = this->create_publisher("refined_object_point_clouds_contours", 5); + m_refined_object_pcl_segment_pub = this->create_publisher("refined_object_point_clouds_segments", 5); m_refined_object_pcl_viz_pub = this->create_publisher("refined_object_point_clouds_viz", 5); - m_refined_object_contour_viz_pub = this->create_publisher("refined_object_contours_viz", 5); + m_refined_object_segment_viz_pub = this->create_publisher("refined_object_segments_viz", 5); // get color label mappings from yaml RCLCPP_DEBUG(this->get_logger(), "READING COLOR LABEL MAPPINGS"); @@ -464,8 +464,8 @@ void BBoxProjectPCloud::bb_pcl_project( } } - auto refined_pcl_contours = all_seaing_interfaces::msg::LabeledObjectPointCloud(); - refined_pcl_contours.label = bbox.label; + auto refined_pcl_segments = all_seaing_interfaces::msg::LabeledObjectPointCloud(); + refined_pcl_segments.label = bbox.label; pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud()); refined_cloud_ptr->header = pcloud_ptr->header; cv::Mat refined_obj_contour_mat = cv::Mat::zeros(img_sz, CV_8UC3); @@ -479,12 +479,12 @@ void BBoxProjectPCloud::bb_pcl_project( cv::Point2d cloud_pt_xy = m_cam_model.project3dToPixel(cv::Point3d(pt.y, pt.z, -pt.x)); mat_opt_cluster.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(opt_cluster_id, clusters_indices.size()); } - pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_contours.cloud); + pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_segments.cloud); cv_bridge::CvImagePtr refined_obj_contour_ptr(new 
cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, refined_obj_contour_mat)); // cv::imshow("Object contour image to be published:", refined_obj_contour_mat); // cv::waitKey(); - refined_pcl_contours.contour = *refined_obj_contour_ptr->toImageMsg(); - refined_objects_pub.objects.push_back(refined_pcl_contours); + refined_pcl_segments.segment = *refined_obj_contour_ptr->toImageMsg(); + refined_objects_pub.objects.push_back(refined_pcl_segments); refined_cloud_contour_vec.push_back(std::make_pair(*refined_cloud_ptr,in_contours[opt_contour_id])); // max_refined_len = std::max(max_refined_len, (int)refined_cloud_ptr->size()); @@ -504,7 +504,7 @@ void BBoxProjectPCloud::bb_pcl_project( // cv::waitKey(); } RCLCPP_DEBUG(this->get_logger(), "WILL NOW SEND REFINED OBJECT POINT CLOUDS & CONTOURS"); - m_refined_object_pcl_contour_pub->publish(refined_objects_pub); + m_refined_object_pcl_segment_pub->publish(refined_objects_pub); RCLCPP_DEBUG(this->get_logger(), "PUBLISHED REFINED OBJECT POINT CLOUDS & CONTOURS"); pcl::PointCloud::Ptr all_obj_refined_pcls_ptr(new pcl::PointCloud()); all_obj_refined_pcls_ptr->header = in_cloud_tf_ptr->header; @@ -532,7 +532,7 @@ void BBoxProjectPCloud::bb_pcl_project( cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, all_obj_refined_contours)); // cv::imshow("Object contour image to be published:", all_obj_refined_contours); // cv::waitKey(); - m_refined_object_contour_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); + m_refined_object_segment_viz_pub->publish(*all_obj_refined_contour_ptr->toImageMsg()); RCLCPP_DEBUG(this->get_logger(), "SENT OBJECT POINT CLOUDS FOR VISUALIZATION"); } diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 7b67084c..93200f5a 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -56,9 +56,9 @@ class BBoxProjectPCloud : public rclcpp::Node{ // Publishers and subscribers rclcpp::Publisher::SharedPtr m_object_pcl_pub; rclcpp::Publisher::SharedPtr m_object_pcl_viz_pub; - rclcpp::Publisher::SharedPtr m_refined_object_pcl_contour_pub; + rclcpp::Publisher::SharedPtr m_refined_object_pcl_segment_pub; rclcpp::Publisher::SharedPtr m_refined_object_pcl_viz_pub; - rclcpp::Publisher::SharedPtr m_refined_object_contour_viz_pub; + rclcpp::Publisher::SharedPtr m_refined_object_segment_viz_pub; rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; message_filters::Subscriber m_image_sub; message_filters::Subscriber m_cloud_sub; From bd61857933747799d995134fdc89522af43e0472 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Thu, 19 Dec 2024 00:01:29 -0500 Subject: [PATCH 26/35] Boilerplate code for object tracking & map publishing node --- all_seaing_interfaces/msg/Obstacle.msg | 3 ++ all_seaing_perception/CMakeLists.txt | 6 +++ .../object_tracking_map.cpp | 14 ++++++ .../all_seaing_perception/obstacle.cpp | 2 + .../object_tracking_map.hpp | 44 +++++++++++++++++++ 5 files changed, 69 insertions(+) create mode 100644 all_seaing_perception/all_seaing_perception/object_tracking_map.cpp create mode 100644 all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp diff --git a/all_seaing_interfaces/msg/Obstacle.msg b/all_seaing_interfaces/msg/Obstacle.msg index 280384d3..2a4dd02a 
100644 --- a/all_seaing_interfaces/msg/Obstacle.msg +++ b/all_seaing_interfaces/msg/Obstacle.msg @@ -14,3 +14,6 @@ geometry_msgs/Point bbox_max geometry_msgs/Point global_bbox_min geometry_msgs/Point global_bbox_max + +sensor_msgs/PointCloud2 cloud +sensor_msgs/Image segment \ No newline at end of file diff --git a/all_seaing_perception/CMakeLists.txt b/all_seaing_perception/CMakeLists.txt index 09a9017b..6ae6293e 100644 --- a/all_seaing_perception/CMakeLists.txt +++ b/all_seaing_perception/CMakeLists.txt @@ -37,9 +37,14 @@ ament_auto_add_executable( bbox_project_pcloud ${PROJECT_NAME}/bbox_project_pcloud.cpp ) +ament_auto_add_executable( + object_tracking_map + ${PROJECT_NAME}/object_tracking_map.cpp +) target_link_libraries(obstacle_bbox_visualizer yaml-cpp) target_link_libraries(bbox_project_pcloud yaml-cpp ${PCL_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_VISUALIZATION_LIBRARIES}) +target_link_libraries(object_tracking_map yaml-cpp ${PCL_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_VISUALIZATION_LIBRARIES}) install(TARGETS obstacle_detector @@ -48,6 +53,7 @@ install(TARGETS point_cloud_image_overlay obstacle_bbox_visualizer bbox_project_pcloud + object_tracking_map DESTINATION lib/${PROJECT_NAME} ) diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp new file mode 100644 index 00000000..137259d9 --- /dev/null +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -0,0 +1,14 @@ +#include "all_seaing_perception/object_tracking_map.hpp" + +ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ + +} + +ObjectTrackingMap::~ObjectTrackingMap() {} + +int main(int argc, char *argv[]) { + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared()); + rclcpp::shutdown(); + return 0; +} \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/obstacle.cpp b/all_seaing_perception/all_seaing_perception/obstacle.cpp index 2e223a94..674c606a 100644 --- a/all_seaing_perception/all_seaing_perception/obstacle.cpp +++ b/all_seaing_perception/all_seaing_perception/obstacle.cpp @@ -47,6 +47,8 @@ T Obstacle::convert_to_global(double nav_x, double nav_y, double nav_heading, T return new_point; } +//TODO: add the cluster and the image segments (respective frames should be correct in the header of both) to the published quantities, change the class attributes & methods to do that + void Obstacle::to_ros_msg(std_msgs::msg::Header local_header, std_msgs::msg::Header global_header, all_seaing_interfaces::msg::Obstacle &out_obstacle_msg) { out_obstacle_msg.id = this->get_id(); diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp new file mode 100644 index 00000000..93abb937 --- /dev/null +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -0,0 +1,44 @@ +#ifndef ALL_SEAING_PERCEPTION__OBJECT_TRACKING_MAP_HPP +#define ALL_SEAING_PERCEPTION__OBJECT_TRACKING_MAP_HPP + +#include +#include +#include "yaml-cpp/yaml.h" + +#include "rclcpp/rclcpp.hpp" + +#include "tf2_ros/buffer.h" +#include "tf2_ros/transform_listener.h" + +#include "image_geometry/pinhole_camera_model.h" +#include "message_filters/subscriber.h" + +#include "geometry_msgs/msg/transform_stamped.hpp" +#include "sensor_msgs/msg/camera_info.hpp" +#include "sensor_msgs/msg/image.hpp" +#include 
"sensor_msgs/msg/point_cloud2.hpp" + +#include +#include +#include "pcl_conversions/pcl_conversions.h" + +#include "cv_bridge/cv_bridge.h" + +#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp" +#include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" + +#include "all_seaing_interfaces/msg/labeled_object_point_cloud_array.hpp" +#include "all_seaing_interfaces/msg/labeled_object_point_cloud.hpp" + +#include +#include + +class ObjectTrackingMap : public rclcpp::Node{ +private: + +public: + ObjectTrackingMap(); + virtual ~ObjectTrackingMap(); +}; + +#endif // ALL_SEAING_PERCEPTION__OBJECT_TRACKING_MAP_HPP \ No newline at end of file From 24a92db6581e89e2aaf51fe1d362cebd38d0157c Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Sat, 18 Jan 2025 02:06:05 +0200 Subject: [PATCH 27/35] Setup (mostly some copied stuff, with some modifications, from obstacle_detector.cpp that will serve the same purpose here) for untracked & tracked obstacle map publishing --- all_seaing_bringup/launch/sim.launch.py | 1 + .../msg/LabeledObjectPointCloud.msg | 1 + all_seaing_interfaces/msg/Obstacle.msg | 5 +- .../bbox_project_pcloud.cpp | 2 + .../object_tracking_map.cpp | 68 +++++++++++++++++++ .../all_seaing_perception/obstacle.cpp | 2 - .../object_tracking_map.hpp | 25 +++++++ .../launch/object_tracking_map_test.launch.py | 23 +++++++ 8 files changed, 121 insertions(+), 6 deletions(-) create mode 100644 all_seaing_perception/launch/object_tracking_map_test.launch.py diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 1b113493..276779eb 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -286,6 +286,7 @@ def launch_setup(context, *args, **kwargs): controller_server, obstacle_bbox_overlay_node, obstacle_bbox_visualizer_node, + bbox_project_pcloud_node, color_segmentation_node, point_cloud_filter_node, obstacle_detector_node, diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg index 30e95e28..73c489d3 100644 --- a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -1,3 +1,4 @@ +builtin_interfaces/Time last_seen int8 label sensor_msgs/PointCloud2 cloud sensor_msgs/Image segment \ No newline at end of file diff --git a/all_seaing_interfaces/msg/Obstacle.msg b/all_seaing_interfaces/msg/Obstacle.msg index 2a4dd02a..806144fc 100644 --- a/all_seaing_interfaces/msg/Obstacle.msg +++ b/all_seaing_interfaces/msg/Obstacle.msg @@ -13,7 +13,4 @@ geometry_msgs/Point bbox_min geometry_msgs/Point bbox_max geometry_msgs/Point global_bbox_min -geometry_msgs/Point global_bbox_max - -sensor_msgs/PointCloud2 cloud -sensor_msgs/Image segment \ No newline at end of file +geometry_msgs/Point global_bbox_max \ No newline at end of file diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index d704bca7..ea555680 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -185,6 +185,7 @@ void BBoxProjectPCloud::bb_pcl_project( for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); + labeled_pcl.last_seen = in_cloud_tf_ptr->header.stamp; labeled_pcl.label = bbox.label; 
obj_cloud_ptr->header = in_cloud_tf_ptr->header; RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%d,%d), (%d, %d)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y); @@ -465,6 +466,7 @@ void BBoxProjectPCloud::bb_pcl_project( } auto refined_pcl_segments = all_seaing_interfaces::msg::LabeledObjectPointCloud(); + refined_pcl_segments.last_seen = in_cloud_tf_ptr->header.stamp; refined_pcl_segments.label = bbox.label; pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud()); refined_cloud_ptr->header = pcloud_ptr->header; diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 137259d9..621deb98 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -1,7 +1,75 @@ #include "all_seaing_perception/object_tracking_map.hpp" ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ + // Initialize parameters + this->declare_parameter("global_frame_id", "odom"); + // Initialize member variables from parameters + m_global_frame_id = this->get_parameter("global_frame_id").as_string(); + + // Initialize navigation variables to 0 + m_nav_x = 0; + m_nav_y = 0; + m_nav_heading = 0; + + // Initialize publishers and subscribers + m_untracked_map_pub = + this->create_publisher("obstacle_map/refined_untracked", 10); + m_tracked_map_pub = this->create_publisher( + "obstacle_map/refined_tracked", 10); + m_object_sub = this->create_subscription( + "refined_object_point_clouds_segments", rclcpp::SensorDataQoS(), + std::bind(&ObjectTrackingMap::object_track_publish, this, std::placeholders::_1)); + m_odom_sub = this->create_subscription( + "odometry/filtered", 10, + std::bind(&ObjectTrackingMap::odom_callback, this, std::placeholders::_1)); +} + +//copied from obstacle_detector.cpp (same function, different class) +void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { + m_nav_x = msg.pose.pose.position.x; + m_nav_y = msg.pose.pose.position.y; + tf2::Quaternion q; + q.setW(msg.pose.pose.orientation.w); + q.setX(msg.pose.pose.orientation.x); + q.setY(msg.pose.pose.orientation.y); + q.setZ(msg.pose.pose.orientation.z); + tf2::Matrix3x3 m(q); + double r, p, y; + m.getRPY(r, p, y); + m_nav_heading = y; + + m_nav_pose = msg.pose.pose; +} + +//TODO: Implement untracked (but labeled, since the data is provided that way) map publishing (just convert to required format using below copied map function and utility/message creating functions in obstacle.hpp) +//TODO: Implement object tracking and tracked map publishing +void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray &msg){ + +} + +void ObjectTrackingMap::publish_map( + std_msgs::msg::Header local_header, std::string ns, bool is_labeled, + const std::vector> &map, + rclcpp::Publisher::SharedPtr pub) { + + // Create global header + std_msgs::msg::Header global_header = std_msgs::msg::Header(); + global_header.frame_id = m_global_frame_id; + global_header.stamp = local_header.stamp; + + all_seaing_interfaces::msg::ObstacleMap map_msg; + map_msg.ns = ns; + map_msg.local_header = local_header; + map_msg.header = global_header; + map_msg.is_labeled = is_labeled; + for (unsigned int i = 0; i < map.size(); i++) { + all_seaing_interfaces::msg::Obstacle raw_obstacle; + map[i]->to_ros_msg(local_header, global_header, raw_obstacle); + map_msg.obstacles.push_back(raw_obstacle); + } + 
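    // Descriptive note on the message being assembled here (comment only, not part of the committed hunk):
    // local_header keeps the sensor frame and stamp of the incoming clouds, global_header re-stamps the
    // same time in m_global_frame_id, and map_msg.pose below records the odometry pose that was used for
    // the local-to-global conversion of the obstacles above.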
map_msg.pose = m_nav_pose; + pub->publish(map_msg); } ObjectTrackingMap::~ObjectTrackingMap() {} diff --git a/all_seaing_perception/all_seaing_perception/obstacle.cpp b/all_seaing_perception/all_seaing_perception/obstacle.cpp index 6716e32a..075bbcae 100644 --- a/all_seaing_perception/all_seaing_perception/obstacle.cpp +++ b/all_seaing_perception/all_seaing_perception/obstacle.cpp @@ -48,8 +48,6 @@ T Obstacle::convert_to_global(double nav_x, double nav_y, double nav_heading, T return new_point; } -//TODO: add the cluster and the image segments (respective frames should be correct in the header of both) to the published quantities, change the class attributes & methods to do that - void Obstacle::to_ros_msg(std_msgs::msg::Header local_header, std_msgs::msg::Header global_header, all_seaing_interfaces::msg::Obstacle &out_obstacle_msg) { out_obstacle_msg.id = this->get_id(); diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp index 93abb937..a0fe5ce6 100644 --- a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -10,6 +10,9 @@ #include "tf2_ros/buffer.h" #include "tf2_ros/transform_listener.h" +#include "tf2/LinearMath/Matrix3x3.h" +#include "tf2/LinearMath/Quaternion.h" + #include "image_geometry/pinhole_camera_model.h" #include "message_filters/subscriber.h" @@ -29,13 +32,35 @@ #include "all_seaing_interfaces/msg/labeled_object_point_cloud_array.hpp" #include "all_seaing_interfaces/msg/labeled_object_point_cloud.hpp" +#include "all_seaing_interfaces/msg/obstacle_map.hpp" + +#include "all_seaing_perception/obstacle.hpp" #include #include class ObjectTrackingMap : public rclcpp::Node{ private: + void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray &msg); + void odom_callback(const nav_msgs::msg::Odometry &msg); + void publish_map(std_msgs::msg::Header local_header, std::string ns, bool is_labeled, + const std::vector> &map, + rclcpp::Publisher::SharedPtr pub); + + // Member variables + std::vector> m_tracked_obstacles; + std::string m_global_frame_id; + int m_obstacle_id; + + float m_nav_x, m_nav_y, m_nav_heading; + + geometry_msgs::msg::Pose m_nav_pose; + // Publishers and subscribers + rclcpp::Publisher::SharedPtr m_untracked_map_pub; + rclcpp::Publisher::SharedPtr m_tracked_map_pub; + rclcpp::Subscription::SharedPtr m_object_sub; + rclcpp::Subscription::SharedPtr m_odom_sub; public: ObjectTrackingMap(); virtual ~ObjectTrackingMap(); diff --git a/all_seaing_perception/launch/object_tracking_map_test.launch.py b/all_seaing_perception/launch/object_tracking_map_test.launch.py new file mode 100644 index 00000000..26a938b4 --- /dev/null +++ b/all_seaing_perception/launch/object_tracking_map_test.launch.py @@ -0,0 +1,23 @@ +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch.actions import IncludeLaunchDescription +from launch.launch_description_sources import PythonLaunchDescriptionSource +import launch_ros +import os + +def generate_launch_description(): + bringup_prefix = get_package_share_directory("all_seaing_bringup") + object_tracking_map_node = launch_ros.actions.Node( + package="all_seaing_perception", + executable="object_tracking_map", + output="screen", + remappings=[ + + ], + parameters=[ + + ] + ) + return LaunchDescription([ + object_tracking_map_node + 
]) \ No newline at end of file From f9dd21677cb86806e5c3048688c3c079daa30b7a Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 21 Jan 2025 20:14:34 +0200 Subject: [PATCH 28/35] Added code to publish untracked obstacles in the required obstacle map format (builds, not tested) --- .../msg/LabeledObjectPointCloud.msg | 2 +- all_seaing_perception/CMakeLists.txt | 1 + .../bbox_project_pcloud.cpp | 4 +- .../object_tracking_map.cpp | 52 +++++++++++++++++-- .../bbox_project_pcloud.hpp | 1 + .../object_tracking_map.hpp | 35 +++++++++++-- 6 files changed, 85 insertions(+), 10 deletions(-) diff --git a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg index 73c489d3..71bb3f28 100644 --- a/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg +++ b/all_seaing_interfaces/msg/LabeledObjectPointCloud.msg @@ -1,4 +1,4 @@ -builtin_interfaces/Time last_seen +builtin_interfaces/Time time int8 label sensor_msgs/PointCloud2 cloud sensor_msgs/Image segment \ No newline at end of file diff --git a/all_seaing_perception/CMakeLists.txt b/all_seaing_perception/CMakeLists.txt index 049ea900..cc337da1 100644 --- a/all_seaing_perception/CMakeLists.txt +++ b/all_seaing_perception/CMakeLists.txt @@ -42,6 +42,7 @@ ament_auto_add_executable( ament_auto_add_executable( object_tracking_map ${PROJECT_NAME}/object_tracking_map.cpp + ${PROJECT_NAME}/obstacle.cpp ) target_link_libraries(obstacle_bbox_visualizer yaml-cpp) diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index ea555680..f8538a84 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -185,7 +185,7 @@ void BBoxProjectPCloud::bb_pcl_project( for (all_seaing_interfaces::msg::LabeledBoundingBox2D bbox : in_bbox_msg->boxes){ auto labeled_pcl = all_seaing_interfaces::msg::LabeledObjectPointCloud(); pcl::PointCloud::Ptr obj_cloud_ptr(new pcl::PointCloud); - labeled_pcl.last_seen = in_cloud_tf_ptr->header.stamp; + labeled_pcl.time = in_cloud_msg->header.stamp; labeled_pcl.label = bbox.label; obj_cloud_ptr->header = in_cloud_tf_ptr->header; RCLCPP_DEBUG(this->get_logger(), "BOUNDING BOX FOR OBJECT %d: (%d,%d), (%d, %d)", obj, bbox.min_x, bbox.min_y, bbox.max_x, bbox.max_y); @@ -466,7 +466,7 @@ void BBoxProjectPCloud::bb_pcl_project( } auto refined_pcl_segments = all_seaing_interfaces::msg::LabeledObjectPointCloud(); - refined_pcl_segments.last_seen = in_cloud_tf_ptr->header.stamp; + refined_pcl_segments.time = in_cloud_msg->header.stamp; refined_pcl_segments.label = bbox.label; pcl::PointCloud::Ptr refined_cloud_ptr(new pcl::PointCloud()); refined_cloud_ptr->header = pcloud_ptr->header; diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 621deb98..331b246c 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -19,13 +19,13 @@ ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ "obstacle_map/refined_tracked", 10); m_object_sub = this->create_subscription( "refined_object_point_clouds_segments", rclcpp::SensorDataQoS(), - std::bind(&ObjectTrackingMap::object_track_publish, this, std::placeholders::_1)); + std::bind(&ObjectTrackingMap::object_track_map_publish, this, 
std::placeholders::_1)); m_odom_sub = this->create_subscription( "odometry/filtered", 10, std::bind(&ObjectTrackingMap::odom_callback, this, std::placeholders::_1)); } -//copied from obstacle_detector.cpp (same function, different class) +//copied from obstacle_detector.cpp void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { m_nav_x = msg.pose.pose.position.x; m_nav_y = msg.pose.pose.position.y; @@ -42,10 +42,54 @@ void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { m_nav_pose = msg.pose.pose; } -//TODO: Implement untracked (but labeled, since the data is provided that way) map publishing (just convert to required format using below copied map function and utility/message creating functions in obstacle.hpp) +//copied & modified from obstacle.cpp +template +T ObjectTrackingMap::convert_to_global(double nav_x, double nav_y, double nav_heading, T point) { + T new_point = point;//to keep color-related data + double magnitude = std::hypot(point.x, point.y); + double point_angle = std::atan2(point.y, point.x); + new_point.x = nav_x + std::cos(nav_heading + point_angle) * magnitude; + new_point.y = nav_y + std::sin(nav_heading + point_angle) * magnitude; + new_point.z = 0; + return new_point; +} + //TODO: Implement object tracking and tracked map publishing -void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray &msg){ +void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ + if(msg->objects.size()==0) return; + std::vector> raw_obstacles; + for(all_seaing_interfaces::msg::LabeledObjectPointCloud obj : msg->objects){ + pcl::PointCloud::Ptr local_obj_pcloud(new pcl::PointCloud); + pcl::PointCloud::Ptr global_obj_pcloud(new pcl::PointCloud); + pcl::fromROSMsg(obj.cloud, *local_obj_pcloud); + for(pcl::PointXYZHSV &pt : local_obj_pcloud->points){ + pcl::PointXYZHSV global_pt = this->convert_to_global(m_nav_x, m_nav_y, m_nav_heading, pt); + global_obj_pcloud->push_back(global_pt); + } + std::shared_ptr obj_cloud (new ObjectTrackingMap::ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); + raw_obstacles.push_back(obj_cloud); + } + //Make and publish untracked map + std::vector> untracked_obs; + for(std::shared_ptr raw_obs : raw_obstacles){ + pcl::PointCloud::Ptr raw_cloud(new pcl::PointCloud); + for(pcl::PointXYZHSV pt : raw_obs->local_pcloud_ptr->points){ + pcl::PointXYZRGB rgb_pt; + pcl::PointXYZI i_pt; + pcl::PointXYZHSVtoXYZRGB(pt, rgb_pt); + pcl::PointXYZRGBtoXYZI(rgb_pt, i_pt); + raw_cloud->push_back(i_pt); + } + std::vector ind(raw_cloud->size()); + std::iota (std::begin(ind), std::end(ind), 0); + std::shared_ptr untracked_ob( + new all_seaing_perception::Obstacle(raw_cloud, ind, m_obstacle_id++, + raw_obs->time_seen, m_nav_x, m_nav_y, m_nav_heading)); + untracked_obs.push_back(untracked_ob); + } + this->publish_map(msg->objects[0].cloud.header, "untracked", false, untracked_obs, m_untracked_map_pub); + //Match new obstacles with old ones } void ObjectTrackingMap::publish_map( diff --git a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp index 93200f5a..9dfb5213 100644 --- a/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp +++ b/all_seaing_perception/include/all_seaing_perception/bbox_project_pcloud.hpp @@ -28,6 +28,7 @@ #include "sensor_msgs/msg/camera_info.hpp" #include 
"sensor_msgs/msg/image.hpp" #include "sensor_msgs/msg/point_cloud2.hpp" +#include "builtin_interfaces/msg/time.hpp" #include #include diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp index a0fe5ce6..2b6646a0 100644 --- a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -3,7 +3,9 @@ #include #include -#include "yaml-cpp/yaml.h" +#include "yaml-cpp/yaml.h" + +#include #include "rclcpp/rclcpp.hpp" @@ -20,9 +22,13 @@ #include "sensor_msgs/msg/camera_info.hpp" #include "sensor_msgs/msg/image.hpp" #include "sensor_msgs/msg/point_cloud2.hpp" +#include "builtin_interfaces/msg/time.hpp" +#include "nav_msgs/msg/odometry.hpp" +#include "std_msgs/msg/header.hpp" #include #include +#include #include "pcl_conversions/pcl_conversions.h" #include "cv_bridge/cv_bridge.h" @@ -30,6 +36,10 @@ #include "tf2_geometry_msgs/tf2_geometry_msgs.hpp" #include "tf2_sensor_msgs/tf2_sensor_msgs.hpp" +#include "tf2/exceptions.h" +#include "tf2_ros/transform_listener.h" +#include "tf2_ros/buffer.h" + #include "all_seaing_interfaces/msg/labeled_object_point_cloud_array.hpp" #include "all_seaing_interfaces/msg/labeled_object_point_cloud.hpp" #include "all_seaing_interfaces/msg/obstacle_map.hpp" @@ -41,14 +51,33 @@ class ObjectTrackingMap : public rclcpp::Node{ private: - void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray &msg); + void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg); void odom_callback(const nav_msgs::msg::Odometry &msg); void publish_map(std_msgs::msg::Header local_header, std::string ns, bool is_labeled, const std::vector> &map, rclcpp::Publisher::SharedPtr pub); + template + T convert_to_global(double nav_x, double nav_y, double nav_heading, T point); + + //custom struct to also keep the points themselves with the obstacle (Obstacle doesn't do that and don't want to mess with it) + struct ObjectCloud{ + int id; + rclcpp::Time time_seen; + int label; + pcl::PointCloud::Ptr local_pcloud_ptr; + pcl::PointCloud::Ptr global_pcloud_ptr; + + ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob){ + id = -1; + time_seen = t; + label = l; + local_pcloud_ptr = loc; + global_pcloud_ptr = glob; + } + }; // Member variables - std::vector> m_tracked_obstacles; + std::vector> m_tracked_obstacles; std::string m_global_frame_id; int m_obstacle_id; From f85d781f3c308c93a3add4355578f0032b130c30 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Tue, 21 Jan 2025 20:18:21 +0200 Subject: [PATCH 29/35] Dashboard revert (not essential) --- all_seaing_bringup/rviz/dashboard.rviz | 95 ++++++++++++++----- .../object_tracking_map.cpp | 2 +- 2 files changed, 74 insertions(+), 23 deletions(-) diff --git a/all_seaing_bringup/rviz/dashboard.rviz b/all_seaing_bringup/rviz/dashboard.rviz index 86ca6e8c..40f44e40 100644 --- a/all_seaing_bringup/rviz/dashboard.rviz +++ b/all_seaing_bringup/rviz/dashboard.rviz @@ -5,9 +5,11 @@ Panels: Property Tree Widget: Expanded: - /Global Options1 - - /PointCloud21/Topic1 - Splitter Ratio: 0.48148149251937866 - Tree Height: 922 + - /PointCloud21 + - /PointCloud22 + - /PointCloud22/Topic1 + Splitter Ratio: 0.5 + Tree Height: 195 - Class: rviz_common/Selection Name: Selection - Class: rviz_common/Tool Properties @@ -295,21 +297,26 @@ 
Visualization Manager: Color Scheme: map Draw Behind: false Enabled: true - Name: Map + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 0 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: PointCloud2 + Position Transformer: XYZ + Selectable: true + Size (Pixels): 3 + Size (m): 0.30000001192092896 + Style: Flat Squares Topic: Depth: 5 Durability Policy: Volatile Filter size: 10 History Policy: Keep Last - Reliability Policy: Reliable - Value: /map - Update Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /map_updates - Use Timestamp: false + Reliability Policy: Best Effort + Value: /refined_object_point_clouds_viz + Use Fixed Frame: true + Use rainbow: true Value: true - Alpha: 1 Autocompute Intensity Bounds: true @@ -333,8 +340,8 @@ Visualization Manager: Position Transformer: XYZ Selectable: true Size (Pixels): 3 - Size (m): 0.009999999776482582 - Style: Points + Size (m): 0.30000001192092896 + Style: Flat Squares Topic: Depth: 5 Durability Policy: Volatile @@ -344,7 +351,49 @@ Visualization Manager: Value: /point_cloud/filtered Use Fixed Frame: true Use rainbow: true + Value: true + - Class: rviz_default_plugins/Image + Enabled: false + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /refined_object_contours_viz Value: false + - Class: rviz_default_plugins/Image + Enabled: true + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /wamv/sensors/cameras/front_left_camera_sensor/image_raw + Value: true + - Class: rviz_default_plugins/Image + Enabled: true + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /refined_object_segments_viz + Value: true Enabled: true Global Options: Background Color: 48; 48; 48 @@ -391,25 +440,25 @@ Visualization Manager: Views: Current: Class: rviz_default_plugins/Orbit - Distance: 72.52093505859375 + Distance: 75.15950775146484 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: -0.7055070400238037 - Y: 29.966957092285156 - Z: 8.167037010192871 + X: 14.586471557617188 + Y: 16.246610641479492 + Z: -2.225355863571167 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 0.7347967028617859 + Pitch: 0.5253973603248596 Target Frame: Value: Orbit (rviz) - Yaw: 4.648568630218506 + Yaw: 4.643567085266113 Saved: ~ Window Geometry: Displays: @@ -417,7 +466,9 @@ Window Geometry: Height: 1137 Hide Left Dock: false Hide Right Dock: true - QMainWindow State: 
000000ff00000000fd0000000400000000000001a7000003d5fc0200000008fb000000100044006900730070006c006100790073010000003b000003d5000000c700fffffffb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c000002610000000100000100000003d7fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b000003d7000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000780000000c7fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000075e00000040fc0100000002fb0000000800540069006d006501000000000000075e0000025300fffffffb0000000800540069006d00650100000000000004500000000000000000000005b1000003d500000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Image: + collapsed: false + QMainWindow State: 000000ff00000000fd000000040000000000000161000002f7fc020000000bfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006e00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000470000015f000000f100fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650000000178000000690000002f00fffffffb0000000a0049006d00610067006501000001ad000000a60000002f00fffffffb0000000a0049006d006100670065010000025a000000e40000002f00ffffff0000000100000100000003d7fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b000003d7000000c200fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000078000000040fc0100000002fb0000000800540069006d00650100000000000007800000038100fffffffb0000000800540069006d0065010000000000000450000000000000000000000618000002f700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 331b246c..f824b20f 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -54,7 +54,7 @@ T 
ObjectTrackingMap::convert_to_global(double nav_x, double nav_y, double nav_he return new_point; } -//TODO: Implement object tracking and tracked map publishing +//TODO: Implement object tracking and tracked obstacle map publishing void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ if(msg->objects.size()==0) return; std::vector> raw_obstacles; From 043c1194c7c0cbb0fe5f436ae9aa39e1dab07683 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Wed, 22 Jan 2025 03:36:14 +0200 Subject: [PATCH 30/35] Fixed it, seems to correctly publish untracked obstacles in the ObstacleMap message format (and visualized in RViz appropriately) --- all_seaing_bringup/rviz/dashboard.rviz | 67 ++++++------------- .../bbox_project_pcloud.cpp | 4 ++ .../object_tracking_map.cpp | 3 +- all_seaing_rviz_plugins/CMakeLists.txt | 6 +- 4 files changed, 32 insertions(+), 48 deletions(-) diff --git a/all_seaing_bringup/rviz/dashboard.rviz b/all_seaing_bringup/rviz/dashboard.rviz index 40f44e40..c5c528eb 100644 --- a/all_seaing_bringup/rviz/dashboard.rviz +++ b/all_seaing_bringup/rviz/dashboard.rviz @@ -5,11 +5,12 @@ Panels: Property Tree Widget: Expanded: - /Global Options1 + - /ObstacleMap1 + - /ObstacleMap1/Topic1 - /PointCloud21 - - /PointCloud22 - - /PointCloud22/Topic1 + - /PointCloud21/Topic1 Splitter Ratio: 0.5 - Tree Height: 195 + Tree Height: 280 - Class: rviz_common/Selection Name: Selection - Class: rviz_common/Tool Properties @@ -70,16 +71,16 @@ Visualization Manager: Enabled: true Name: ObstacleMap Namespaces: - labeled: true - labeled_edges: true - labeled_text: true - labeled_vertices: true + untracked: true + untracked_edges: true + untracked_text: true + untracked_vertices: true Topic: Depth: 5 Durability Policy: Volatile History Policy: Keep Last Reliability Policy: Reliable - Value: /obstacle_map/labeled + Value: /obstacle_map/refined_untracked Value: true - Alpha: 1 Class: rviz_default_plugins/RobotModel @@ -292,32 +293,6 @@ Visualization Manager: Reliability Policy: Reliable Value: /clicked_point Value: true - - Alpha: 0.699999988079071 - Class: rviz_default_plugins/Map - Color Scheme: map - Draw Behind: false - Enabled: true - Invert Rainbow: false - Max Color: 255; 255; 255 - Max Intensity: 0 - Min Color: 0; 0; 0 - Min Intensity: 0 - Name: PointCloud2 - Position Transformer: XYZ - Selectable: true - Size (Pixels): 3 - Size (m): 0.30000001192092896 - Style: Flat Squares - Topic: - Depth: 5 - Durability Policy: Volatile - Filter size: 10 - History Policy: Keep Last - Reliability Policy: Best Effort - Value: /refined_object_point_clouds_viz - Use Fixed Frame: true - Use rainbow: true - Value: true - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: @@ -330,7 +305,7 @@ Visualization Manager: Color: 255; 255; 255 Color Transformer: Intensity Decay Time: 0 - Enabled: false + Enabled: true Invert Rainbow: false Max Color: 255; 255; 255 Max Intensity: 0 @@ -348,12 +323,12 @@ Visualization Manager: Filter size: 10 History Policy: Keep Last Reliability Policy: Best Effort - Value: /point_cloud/filtered + Value: /refined_object_point_clouds_viz Use Fixed Frame: true Use rainbow: true Value: true - Class: rviz_default_plugins/Image - Enabled: false + Enabled: true Max Value: 1 Median window: 5 Min Value: 0 @@ -365,7 +340,7 @@ Visualization Manager: History Policy: Keep Last Reliability Policy: Reliable Value: /refined_object_contours_viz - Value: false + Value: true - Class: rviz_default_plugins/Image 
Enabled: true Max Value: 1 @@ -440,16 +415,16 @@ Visualization Manager: Views: Current: Class: rviz_default_plugins/Orbit - Distance: 75.15950775146484 + Distance: 46.016841888427734 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: 14.586471557617188 - Y: 16.246610641479492 - Z: -2.225355863571167 + X: 37.104736328125 + Y: 15.365486145019531 + Z: -1.0705451965332031 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false @@ -463,7 +438,7 @@ Visualization Manager: Window Geometry: Displays: collapsed: false - Height: 1137 + Height: 934 Hide Left Dock: false Hide Right Dock: true Image: @@ -477,6 +452,6 @@ Window Geometry: collapsed: false Views: collapsed: true - Width: 1886 - X: 15 - Y: 44 + Width: 1920 + X: 0 + Y: 34 diff --git a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp index f8538a84..995b223a 100644 --- a/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp +++ b/all_seaing_perception/all_seaing_perception/bbox_project_pcloud.cpp @@ -210,6 +210,7 @@ void BBoxProjectPCloud::bb_pcl_project( } } pcl::toROSMsg(*obj_cloud_ptr, labeled_pcl.cloud); + labeled_pcl.cloud.header.stamp = in_cloud_msg->header.stamp; object_pcls.objects.push_back(labeled_pcl); obj_cloud_vec.push_back(*obj_cloud_ptr); bbox_pcloud_objects.push_back(std::make_pair(bbox, obj_cloud_ptr)); @@ -238,6 +239,7 @@ void BBoxProjectPCloud::bb_pcl_project( } auto obj_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_pcls_ptr, obj_pcls_msg); + obj_pcls_msg.header.stamp = in_cloud_msg->header.stamp; m_object_pcl_viz_pub->publish(obj_pcls_msg); // REFINE OBJECT POINT CLOUDS @@ -482,6 +484,7 @@ void BBoxProjectPCloud::bb_pcl_project( mat_opt_cluster.at((cv::Point)cloud_pt_xy-bbox_offset) = int_to_bgr(opt_cluster_id, clusters_indices.size()); } pcl::toROSMsg(*refined_cloud_ptr, refined_pcl_segments.cloud); + refined_pcl_segments.cloud.header.stamp = in_cloud_msg->header.stamp; cv_bridge::CvImagePtr refined_obj_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, refined_obj_contour_mat)); // cv::imshow("Object contour image to be published:", refined_obj_contour_mat); // cv::waitKey(); @@ -530,6 +533,7 @@ void BBoxProjectPCloud::bb_pcl_project( RCLCPP_DEBUG(this->get_logger(), "STORED CONTOURS TO BE PUBLISHED"); auto obj_refined_pcls_msg = sensor_msgs::msg::PointCloud2(); pcl::toROSMsg(*all_obj_refined_pcls_ptr, obj_refined_pcls_msg); + obj_refined_pcls_msg.header.stamp = in_cloud_msg->header.stamp; m_refined_object_pcl_viz_pub->publish(obj_refined_pcls_msg); cv_bridge::CvImagePtr all_obj_refined_contour_ptr(new cv_bridge::CvImage(in_img_msg->header, sensor_msgs::image_encodings::TYPE_8UC3, all_obj_refined_contours)); // cv::imshow("Object contour image to be published:", all_obj_refined_contours); diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index f824b20f..087e672c 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -88,7 +88,8 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms untracked_obs.push_back(untracked_ob); } this->publish_map(msg->objects[0].cloud.header, "untracked", false, 
untracked_obs, m_untracked_map_pub); - + //TODO: Fix stamp being published as 0 in local point and global point and chull, and polygon points (local and global chull) non-existing often + RCLCPP_INFO(this->get_logger(), "PUBLISHED STAMP SEC: %d", msg->objects[0].cloud.header.stamp.sec); //Match new obstacles with old ones } diff --git a/all_seaing_rviz_plugins/CMakeLists.txt b/all_seaing_rviz_plugins/CMakeLists.txt index 691fa792..6280375c 100644 --- a/all_seaing_rviz_plugins/CMakeLists.txt +++ b/all_seaing_rviz_plugins/CMakeLists.txt @@ -6,6 +6,10 @@ if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") endif() find_package(ament_cmake_auto REQUIRED) +find_package(ament_cmake_ros REQUIRED) +find_package(pluginlib REQUIRED) +find_package(rviz_common REQUIRED) +find_package(all_seaing_interfaces REQUIRED) ament_auto_find_build_dependencies() include_directories(include) @@ -35,4 +39,4 @@ ament_export_include_directories(include) ament_export_targets(export_all_seaing_rviz_plugins) pluginlib_export_plugin_description_file(rviz_common plugins_description.xml) -ament_auto_package(INSTALL_TO_SHARE plugins_description.xml) \ No newline at end of file +ament_auto_package() \ No newline at end of file From ec934c135e9b7033b32bd576228aa94fe84f919d Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Thu, 23 Jan 2025 05:22:39 +0200 Subject: [PATCH 31/35] Coded obstacle tracking & map publishing, not built or tested yet --- all_seaing_bringup/rviz/dashboard.rviz | 13 +- .../object_tracking_map.cpp | 192 ++++++++++++++++-- .../object_tracking_map.hpp | 31 ++- .../launch/object_tracking_map_test.launch.py | 3 +- 4 files changed, 207 insertions(+), 32 deletions(-) diff --git a/all_seaing_bringup/rviz/dashboard.rviz b/all_seaing_bringup/rviz/dashboard.rviz index c5c528eb..52e768a6 100644 --- a/all_seaing_bringup/rviz/dashboard.rviz +++ b/all_seaing_bringup/rviz/dashboard.rviz @@ -6,7 +6,6 @@ Panels: Expanded: - /Global Options1 - /ObstacleMap1 - - /ObstacleMap1/Topic1 - /PointCloud21 - /PointCloud21/Topic1 Splitter Ratio: 0.5 @@ -415,25 +414,25 @@ Visualization Manager: Views: Current: Class: rviz_default_plugins/Orbit - Distance: 46.016841888427734 + Distance: 33.20790481567383 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: 37.104736328125 - Y: 15.365486145019531 - Z: -1.0705451965332031 + X: 35.28513717651367 + Y: 22.616809844970703 + Z: 10.652386665344238 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 0.5253973603248596 + Pitch: 0.5603973269462585 Target Frame: Value: Orbit (rviz) - Yaw: 4.643567085266113 + Yaw: 4.628566265106201 Saved: ~ Window Geometry: Displays: diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 087e672c..b1cd55e8 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -3,9 +3,13 @@ ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ // Initialize parameters this->declare_parameter("global_frame_id", "odom"); + this->declare_parameter("obstacle_seg_thresh", 1.0); + this->declare_parameter("obstacle_drop_thresh", 1.0); // Initialize member variables from parameters m_global_frame_id = this->get_parameter("global_frame_id").as_string(); 
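    // Comment only, not part of the committed hunk — what the two parameters read next govern, based on
    // their use later in this file:
    // obstacle_seg_thresh: largest global-centroid distance (meters) at which a new detection is still
    // associated with an existing same-label tracked obstacle; beyond it a new track is created.
    // obstacle_drop_thresh: time (seconds) a tracked obstacle may remain undetected while inside the
    // camera field of view before it is dropped from the tracked map.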
+ m_obstacle_seg_thresh = this->get_parameter("obstacle_seg_thresh").as_double(); + m_obstacle_drop_thresh = this->get_parameter("obstacle_drop_thresh").as_double(); // Initialize navigation variables to 0 m_nav_x = 0; @@ -23,9 +27,33 @@ ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ m_odom_sub = this->create_subscription( "odometry/filtered", 10, std::bind(&ObjectTrackingMap::odom_callback, this, std::placeholders::_1)); + m_image_intrinsics_sub = this->create_subscription( + "camera_info_topic", 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); +} + +ObjectTrackingMap::ObjectCloud::ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob){ + id = -1; + label = l; + time_seen = t; + is_dead = false; + global_pcloud_ptr = glob; + for(pcl::PointXYZHSV &global_pt : local_pcloud_ptr->points){ + global_centroid.x += global_pt.x/((double)global_cloud_ptr->points.size()); + global_centroid.y += global_pt.y/((double)global_cloud_ptr->points.size()); + global_centroid.z += global_pt.z/((double)global_cloud_ptr->points.size()); + } + this->update_loc_pcloud(loc); +} + +void ObjectTrackingMap::ObjectCloud::update_loc_pcloud(pcl::PointCloud::Ptr loc){ + local_pcloud_ptr = loc; + for(pcl::PointXYZHSV &local_pt : local_pcloud_ptr->points){ + local_centroid.x += local_pt.x/((double)local_pcloud_ptr->points.size()); + local_centroid.y += local_pt.y/((double)local_pcloud_ptr->points.size()); + local_centroid.z += local_pt.z/((double)local_pcloud_ptr->points.size()); + } } -//copied from obstacle_detector.cpp void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { m_nav_x = msg.pose.pose.position.x; m_nav_y = msg.pose.pose.position.y; @@ -42,7 +70,27 @@ void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { m_nav_pose = msg.pose.pose; } -//copied & modified from obstacle.cpp +void BBoxProjectPCloud::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { + RCLCPP_DEBUG(this->get_logger(), "GOT CAMERA INFO"); + m_cam_model.fromCameraInfo(info_msg); +} + +geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, + const std::string &in_src_frame) { + geometry_msgs::msg::TransformStamped tf; + m_pc_cam_tf_ok = false; + try { + tf = m_tf_buffer->lookupTransform(in_target_frame, in_src_frame, tf2::TimePointZero); + m_pc_cam_tf_ok = true; + RCLCPP_INFO(this->get_logger(), "LiDAR to Camera Transform good"); + RCLCPP_INFO(this->get_logger(), "in_target_frame: %s, in_src_frame: %s", + in_target_frame.c_str(), in_src_frame.c_str()); + } catch (tf2::TransformException &ex) { + RCLCPP_ERROR(this->get_logger(), "%s", ex.what()); + } + return tf; +} + template T ObjectTrackingMap::convert_to_global(double nav_x, double nav_y, double nav_heading, T point) { T new_point = point;//to keep color-related data @@ -54,10 +102,24 @@ T ObjectTrackingMap::convert_to_global(double nav_x, double nav_y, double nav_he return new_point; } -//TODO: Implement object tracking and tracked obstacle map publishing +//TODO: Check it actually works, individually from the other parts +template +T ObjectTrackingMap::convert_to_local(double nav_x, double nav_y, double nav_heading, T point) { + T new_point = point;//to keep color-related data + double diff_x = point.x - nav_x; + double diff_y = point.y - nav_y; + double magnitude = std::hypot(diff_x, diff_y); + double point_angle = std::atan2(diff_y, diff_x); + new_point.x = std::cos(point_angle - nav_heading) * magnitude; + 
new_point.y = std::sin(point_angle - nav_heading) * magnitude; + new_point.z = 0; + return new_point; +} + void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ if(msg->objects.size()==0) return; - std::vector> raw_obstacles; + + std::vector> detected_obstacles; for(all_seaing_interfaces::msg::LabeledObjectPointCloud obj : msg->objects){ pcl::PointCloud::Ptr local_obj_pcloud(new pcl::PointCloud); pcl::PointCloud::Ptr global_obj_pcloud(new pcl::PointCloud); @@ -67,13 +129,14 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms global_obj_pcloud->push_back(global_pt); } std::shared_ptr obj_cloud (new ObjectTrackingMap::ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); - raw_obstacles.push_back(obj_cloud); + detected_obstacles.push_back(obj_cloud); } - //Make and publish untracked map + + // Make and publish untracked map std::vector> untracked_obs; - for(std::shared_ptr raw_obs : raw_obstacles){ + for(std::shared_ptr det_obs : detected_obstacles){ pcl::PointCloud::Ptr raw_cloud(new pcl::PointCloud); - for(pcl::PointXYZHSV pt : raw_obs->local_pcloud_ptr->points){ + for(pcl::PointXYZHSV pt : det_obs->local_pcloud_ptr->points){ pcl::PointXYZRGB rgb_pt; pcl::PointXYZI i_pt; pcl::PointXYZHSVtoXYZRGB(pt, rgb_pt); @@ -84,13 +147,110 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms std::iota (std::begin(ind), std::end(ind), 0); std::shared_ptr untracked_ob( new all_seaing_perception::Obstacle(raw_cloud, ind, m_obstacle_id++, - raw_obs->time_seen, m_nav_x, m_nav_y, m_nav_heading)); + det_obs->time_seen, m_nav_x, m_nav_y, m_nav_heading)); untracked_obs.push_back(untracked_ob); } - this->publish_map(msg->objects[0].cloud.header, "untracked", false, untracked_obs, m_untracked_map_pub); - //TODO: Fix stamp being published as 0 in local point and global point and chull, and polygon points (local and global chull) non-existing often - RCLCPP_INFO(this->get_logger(), "PUBLISHED STAMP SEC: %d", msg->objects[0].cloud.header.stamp.sec); - //Match new obstacles with old ones + this->publish_map(msg->objects[0].cloud.header, "untracked", true, untracked_obs, m_untracked_map_pub); + + /* + OBJECT TRACKING: + --Like in obstacle_detector.cpp, for each detected obstacle, find the closest (RMS point distance? 
or just centroids), + same-color, yet unmatched, detected obstacle, and assign it to it --> if above distance threshold for all mark it as new + --Update matched obstacles and add new ones (and assign them id) + --From each tracked & unmatched obstacle, check if it's in the robot's FoV --> those are the only ones that may be detected at that point + (subscribe to the camera_info topic and get the transform from LiDAR 3D to camera 2D plane and min/max image x,y, + project the local centroid (from its global centroid and get_local()-->also update it in the tracked obstacles) + of the obstacle onto the camera image plane and check if it's within the bounds, with some margin) + --For those objects, update the time they have been undetected in the FoV total (add time from prev appearance, time_seen, if it was in FoV and undetected then too) + --For unmatched obstacles that have been undetected in FoV for > some time threshold, remove them + */ + + // LIDAR -> Camera transform + if (!m_pc_cam_tf_ok) + m_pc_cam_tf = get_tf(msg->objects[0].segment.header.frame_id, msg->objects[0].cloud.header.frame_id); + + // Match new obstacles with old ones, update them, and add new ones + std::unordered_set chosen_indices; + for(std::shared_ptr det_obs : detected_obstacles){ + float best_dist = m_obstacle_seg_thresh; + int best_match = -1; + for(int tracked_id = 0; tracked_id < m_tracked_obstacles.size(); tracked_id++){ + if (chosen_indices.count(j) || m_tracked_obstacles[tracked_id]->label != det_obs->label) + continue; + float curr_dist = pcl::euclideanDistance(m_tracked_obstacles[tracked_id]->global_centroid, + det_obs->global_centroid); + if (curr_dist < best_dist) { + best_match = tracked_id; + best_dist = curr_dist; + } + } + if(best_match >= 0){ + det_obs->id = m_tracked_obstacles[best_match]->id; + m_tracked_obstacles[best_match] = det_obs; + chosen_indices.insert(best_match); + }else{ + det_obs->id = m_obstacle_id++; + m_tracked_obstacles.push_back(det_obs); + } + } + + // Filter old obstacles + for (int tracked_id = 0; tracked_id < m_tracked_obstacles.size(); tracked_id++) { + if(chosen_indices.count(tracked_id)) continue; + // Update local points + pcl::PointCloud::Ptr upd_local_obj_pcloud(new pcl::PointCloud); + for(pcl::PointXYZHSV &global_pt : m_tracked_obstacles[tracked_id]->global_pcloud_ptr){ + upd_local_obj_pcloud->push_back(this->convert_to_local(m_nav_x, m_nav_y, m_nav_heading, global_pt)); + } + m_tracked_obstacles[tracked_id]->update_loc_pcloud(upd_local_obj_pcloud); + // Check if in FoV (which'll mean it's dead, at least temporarily) + geometry_msgs::msg::Point lidar_point; + lidar_point.x = m_tracked_obstacles[tracked_id]->local_centroid.x; + lidar_point.y = m_tracked_obstacles[tracked_id]->local_centroid.y; + lidar_point.z = m_tracked_obstacles[tracked_id]->local_centroid.z; + geometry_msgs::msg::Point camera_point; + tf2::doTransform(lidar_point, camera_point, m_pc_cam_tf); + cv::Point2d xy_rect = m_cam_model.project3dToPixel( + cv::Point3d(camera_point.y, camera_point.z, -camera_point.x)); + if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && + (xy_rect.y < m_cam_model.cameraInfo().height) && (obstacle.local_point.point.x >= 0)) { + // Dead + if(m_tracked_obstacles[tracked_id]->is_dead){ + // Was also dead before, add time dead + m_tracked_obstacles[tracked_id]->time_dead += rclcpp::Duration(msg->objects[0].time.sec, msg->objects[0].time.nanosec) - rclcpp::Duration(m_tracked_obstacles[tracked_id]->last_dead.sec, 
m_tracked_obstacles[tracked_id]->last_dead.nanosec); + if(m_tracked_obstacles[tracked_id]->time_dead.seconds() > m_obstacle_drop_thresh){ + m_tracked_obstacles.erase(m_tracked_obstacles.begin()+tracked_id); + tracked_id--; + continue; + } + } + m_tracked_obstacles[tracked_id]->is_dead = true; + m_tracked_obstacles[tracked_id]->last_dead = msg->objects[0].time; + }else{ + m_tracked_obstacles[tracked_id]->is_dead = false; + } + } + + // Publish map with tracked obstacles + std::vector> tracked_obs; + for(std::shared_ptr t_ob : m_tracked_obstacles){ + pcl::PointCloud::Ptr tracked_cloud(new pcl::PointCloud); + for(pcl::PointXYZHSV pt : t_ob->local_pcloud_ptr->points){ + pcl::PointXYZRGB rgb_pt; + pcl::PointXYZI i_pt; + pcl::PointXYZHSVtoXYZRGB(pt, rgb_pt); + pcl::PointXYZRGBtoXYZI(rgb_pt, i_pt); + tracked_cloud->push_back(i_pt); + } + std::vector ind(tracked_cloud->size()); + std::iota (std::begin(ind), std::end(ind), 0); + std::shared_ptr tracked_ob( + new all_seaing_perception::Obstacle(tracked_cloud, ind, t_ob->id, + t_ob->time_seen, m_nav_x, m_nav_y, m_nav_heading)); + tracked_ob.label = t_ob->label; + tracked_obs.push_back(tracked_ob); + } + this->publish_map(msg->objects[0].cloud.header, "tracked", true, tracked_obs, m_tracked_map_pub); } void ObjectTrackingMap::publish_map( @@ -109,9 +269,9 @@ void ObjectTrackingMap::publish_map( map_msg.header = global_header; map_msg.is_labeled = is_labeled; for (unsigned int i = 0; i < map.size(); i++) { - all_seaing_interfaces::msg::Obstacle raw_obstacle; - map[i]->to_ros_msg(local_header, global_header, raw_obstacle); - map_msg.obstacles.push_back(raw_obstacle); + all_seaing_interfaces::msg::Obstacle det_obstacle; + map[i]->to_ros_msg(local_header, global_header, det_obstacle); + map_msg.obstacles.push_back(det_obstacle); } map_msg.pose = m_nav_pose; pub->publish(map_msg); diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp index 2b6646a0..40cf6b83 100644 --- a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -58,28 +58,33 @@ class ObjectTrackingMap : public rclcpp::Node{ rclcpp::Publisher::SharedPtr pub); template T convert_to_global(double nav_x, double nav_y, double nav_heading, T point); + template + T convert_to_local(double nav_x, double nav_y, double nav_heading, T point); //custom struct to also keep the points themselves with the obstacle (Obstacle doesn't do that and don't want to mess with it) struct ObjectCloud{ int id; - rclcpp::Time time_seen; int label; + rclcpp::Time time_seen; + rclcpp::Time last_dead; + rclcpp::Duration time_dead; + bool is_dead; pcl::PointCloud::Ptr local_pcloud_ptr; pcl::PointCloud::Ptr global_pcloud_ptr; + pcl::PointXYZ local_centroid; + pcl::PointXYZ global_centroid; - ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob){ - id = -1; - time_seen = t; - label = l; - local_pcloud_ptr = loc; - global_pcloud_ptr = glob; - } + ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob); + + void update_loc_pcloud(pcl::PointCloud::Ptr loc); }; // Member variables std::vector> m_tracked_obstacles; std::string m_global_frame_id; int m_obstacle_id; + double m_obstacle_seg_thresh; + double m_obstacle_drop_thresh; float m_nav_x, m_nav_y, m_nav_heading; @@ -90,6 +95,16 @@ class ObjectTrackingMap : public rclcpp::Node{ 
rclcpp::Publisher::SharedPtr m_tracked_map_pub; rclcpp::Subscription::SharedPtr m_object_sub; rclcpp::Subscription::SharedPtr m_odom_sub; + rclcpp::Subscription::SharedPtr m_image_intrinsics_sub; + + // Transform variables + std::shared_ptr m_tf_listener{nullptr}; + std::unique_ptr m_tf_buffer; + geometry_msgs::msg::TransformStamped m_pc_cam_tf; + bool m_pc_cam_tf_ok; + + // Intrinsics callback camera model variables + image_geometry::PinholeCameraModel m_cam_model; public: ObjectTrackingMap(); virtual ~ObjectTrackingMap(); diff --git a/all_seaing_perception/launch/object_tracking_map_test.launch.py b/all_seaing_perception/launch/object_tracking_map_test.launch.py index 26a938b4..d55cc279 100644 --- a/all_seaing_perception/launch/object_tracking_map_test.launch.py +++ b/all_seaing_perception/launch/object_tracking_map_test.launch.py @@ -15,7 +15,8 @@ def generate_launch_description(): ], parameters=[ - + {"obstacle_seg_thresh": 10.0}, + {"obstacle_drop_thresh": 1.0}, ] ) return LaunchDescription([ From da81a8a2887bc756f186298ff14e47611fca8ce8 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 24 Jan 2025 03:27:22 +0200 Subject: [PATCH 32/35] Now builds --- .../object_tracking_map.cpp | 50 +++++++++++-------- .../object_tracking_map.hpp | 46 ++++++++++------- 2 files changed, 56 insertions(+), 40 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index b1cd55e8..23e11099 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -28,24 +28,26 @@ ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ "odometry/filtered", 10, std::bind(&ObjectTrackingMap::odom_callback, this, std::placeholders::_1)); m_image_intrinsics_sub = this->create_subscription( - "camera_info_topic", 10, std::bind(&BBoxProjectPCloud::intrinsics_cb, this, std::placeholders::_1)); + "camera_info_topic", 10, std::bind(&ObjectTrackingMap::intrinsics_cb, this, std::placeholders::_1)); } -ObjectTrackingMap::ObjectCloud::ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob){ +ObjectCloud::ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob){ id = -1; label = l; time_seen = t; + last_dead = rclcpp::Time(0); + time_dead = rclcpp::Duration(0,0); is_dead = false; global_pcloud_ptr = glob; for(pcl::PointXYZHSV &global_pt : local_pcloud_ptr->points){ - global_centroid.x += global_pt.x/((double)global_cloud_ptr->points.size()); - global_centroid.y += global_pt.y/((double)global_cloud_ptr->points.size()); - global_centroid.z += global_pt.z/((double)global_cloud_ptr->points.size()); + global_centroid.x += global_pt.x/((double)global_pcloud_ptr->points.size()); + global_centroid.y += global_pt.y/((double)global_pcloud_ptr->points.size()); + global_centroid.z += global_pt.z/((double)global_pcloud_ptr->points.size()); } this->update_loc_pcloud(loc); } -void ObjectTrackingMap::ObjectCloud::update_loc_pcloud(pcl::PointCloud::Ptr loc){ +void ObjectCloud::update_loc_pcloud(pcl::PointCloud::Ptr loc){ local_pcloud_ptr = loc; for(pcl::PointXYZHSV &local_pt : local_pcloud_ptr->points){ local_centroid.x += local_pt.x/((double)local_pcloud_ptr->points.size()); @@ -70,12 +72,12 @@ void ObjectTrackingMap::odom_callback(const nav_msgs::msg::Odometry &msg) { m_nav_pose = msg.pose.pose; } -void BBoxProjectPCloud::intrinsics_cb(const 
sensor_msgs::msg::CameraInfo &info_msg) { +void ObjectTrackingMap::intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg) { RCLCPP_DEBUG(this->get_logger(), "GOT CAMERA INFO"); m_cam_model.fromCameraInfo(info_msg); } -geometry_msgs::msg::TransformStamped BBoxProjectPCloud::get_tf(const std::string &in_target_frame, +geometry_msgs::msg::TransformStamped ObjectTrackingMap::get_tf(const std::string &in_target_frame, const std::string &in_src_frame) { geometry_msgs::msg::TransformStamped tf; m_pc_cam_tf_ok = false; @@ -119,7 +121,7 @@ T ObjectTrackingMap::convert_to_local(double nav_x, double nav_y, double nav_hea void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ if(msg->objects.size()==0) return; - std::vector> detected_obstacles; + std::vector> detected_obstacles; for(all_seaing_interfaces::msg::LabeledObjectPointCloud obj : msg->objects){ pcl::PointCloud::Ptr local_obj_pcloud(new pcl::PointCloud); pcl::PointCloud::Ptr global_obj_pcloud(new pcl::PointCloud); @@ -128,13 +130,14 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms pcl::PointXYZHSV global_pt = this->convert_to_global(m_nav_x, m_nav_y, m_nav_heading, pt); global_obj_pcloud->push_back(global_pt); } - std::shared_ptr obj_cloud (new ObjectTrackingMap::ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); + std::shared_ptr obj_cloud (new ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); detected_obstacles.push_back(obj_cloud); } // Make and publish untracked map std::vector> untracked_obs; - for(std::shared_ptr det_obs : detected_obstacles){ + std::vector untracked_labels; + for(std::shared_ptr det_obs : detected_obstacles){ pcl::PointCloud::Ptr raw_cloud(new pcl::PointCloud); for(pcl::PointXYZHSV pt : det_obs->local_pcloud_ptr->points){ pcl::PointXYZRGB rgb_pt; @@ -148,9 +151,10 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms std::shared_ptr untracked_ob( new all_seaing_perception::Obstacle(raw_cloud, ind, m_obstacle_id++, det_obs->time_seen, m_nav_x, m_nav_y, m_nav_heading)); + untracked_labels.push_back(det_obs->label); untracked_obs.push_back(untracked_ob); } - this->publish_map(msg->objects[0].cloud.header, "untracked", true, untracked_obs, m_untracked_map_pub); + this->publish_map(msg->objects[0].cloud.header, "untracked", true, untracked_obs, m_untracked_map_pub, untracked_labels); /* OBJECT TRACKING: @@ -171,11 +175,11 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms // Match new obstacles with old ones, update them, and add new ones std::unordered_set chosen_indices; - for(std::shared_ptr det_obs : detected_obstacles){ + for(std::shared_ptr det_obs : detected_obstacles){ float best_dist = m_obstacle_seg_thresh; int best_match = -1; for(int tracked_id = 0; tracked_id < m_tracked_obstacles.size(); tracked_id++){ - if (chosen_indices.count(j) || m_tracked_obstacles[tracked_id]->label != det_obs->label) + if (chosen_indices.count(tracked_id) || m_tracked_obstacles[tracked_id]->label != det_obs->label) continue; float curr_dist = pcl::euclideanDistance(m_tracked_obstacles[tracked_id]->global_centroid, det_obs->global_centroid); @@ -199,7 +203,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms if(chosen_indices.count(tracked_id)) continue; // Update local points pcl::PointCloud::Ptr upd_local_obj_pcloud(new pcl::PointCloud); - for(pcl::PointXYZHSV &global_pt : 
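convert_to_global and convert_to_local are only referenced in these hunks; their bodies sit elsewhere in the file. For orientation, a sketch of the 2D rigid transform such templates usually implement, assuming nav_heading is a yaw in radians and only x/y are rotated while other point fields pass through unchanged (to_global/to_local are illustrative names, not the patch's exact code):

#include <cmath>

// Local (vehicle) frame -> global (odom) frame, for any point type with x and y members.
template <typename T>
T to_global(double nav_x, double nav_y, double nav_heading, T point) {
    T out = point;
    out.x = nav_x + point.x * std::cos(nav_heading) - point.y * std::sin(nav_heading);
    out.y = nav_y + point.x * std::sin(nav_heading) + point.y * std::cos(nav_heading);
    return out;  // z and any extra fields (e.g. HSV) are carried over unchanged
}

// Inverse transform: global (odom) frame -> local (vehicle) frame.
template <typename T>
T to_local(double nav_x, double nav_y, double nav_heading, T point) {
    T out = point;
    double dx = point.x - nav_x, dy = point.y - nav_y;
    out.x =  dx * std::cos(nav_heading) + dy * std::sin(nav_heading);
    out.y = -dx * std::sin(nav_heading) + dy * std::cos(nav_heading);
    return out;
}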
m_tracked_obstacles[tracked_id]->global_pcloud_ptr){ + for(pcl::PointXYZHSV &global_pt : m_tracked_obstacles[tracked_id]->global_pcloud_ptr->points){ upd_local_obj_pcloud->push_back(this->convert_to_local(m_nav_x, m_nav_y, m_nav_heading, global_pt)); } m_tracked_obstacles[tracked_id]->update_loc_pcloud(upd_local_obj_pcloud); @@ -213,11 +217,11 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms cv::Point2d xy_rect = m_cam_model.project3dToPixel( cv::Point3d(camera_point.y, camera_point.z, -camera_point.x)); if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && - (xy_rect.y < m_cam_model.cameraInfo().height) && (obstacle.local_point.point.x >= 0)) { + (xy_rect.y < m_cam_model.cameraInfo().height) && (lidar_point.x >= 0)) { // Dead if(m_tracked_obstacles[tracked_id]->is_dead){ // Was also dead before, add time dead - m_tracked_obstacles[tracked_id]->time_dead += rclcpp::Duration(msg->objects[0].time.sec, msg->objects[0].time.nanosec) - rclcpp::Duration(m_tracked_obstacles[tracked_id]->last_dead.sec, m_tracked_obstacles[tracked_id]->last_dead.nanosec); + m_tracked_obstacles[tracked_id]->time_dead = rclcpp::Duration(msg->objects[0].time.sec, msg->objects[0].time.nanosec) - rclcpp::Duration(m_tracked_obstacles[tracked_id]->last_dead.seconds(), m_tracked_obstacles[tracked_id]->last_dead.nanoseconds()) + m_tracked_obstacles[tracked_id]->time_dead; if(m_tracked_obstacles[tracked_id]->time_dead.seconds() > m_obstacle_drop_thresh){ m_tracked_obstacles.erase(m_tracked_obstacles.begin()+tracked_id); tracked_id--; @@ -233,7 +237,8 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms // Publish map with tracked obstacles std::vector> tracked_obs; - for(std::shared_ptr t_ob : m_tracked_obstacles){ + std::vector tracked_labels; + for(std::shared_ptr t_ob : m_tracked_obstacles){ pcl::PointCloud::Ptr tracked_cloud(new pcl::PointCloud); for(pcl::PointXYZHSV pt : t_ob->local_pcloud_ptr->points){ pcl::PointXYZRGB rgb_pt; @@ -247,16 +252,16 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms std::shared_ptr tracked_ob( new all_seaing_perception::Obstacle(tracked_cloud, ind, t_ob->id, t_ob->time_seen, m_nav_x, m_nav_y, m_nav_heading)); - tracked_ob.label = t_ob->label; + tracked_labels.push_back(t_ob->label); tracked_obs.push_back(tracked_ob); } - this->publish_map(msg->objects[0].cloud.header, "tracked", true, tracked_obs, m_tracked_map_pub); + this->publish_map(msg->objects[0].cloud.header, "tracked", true, tracked_obs, m_tracked_map_pub, tracked_labels); } void ObjectTrackingMap::publish_map( std_msgs::msg::Header local_header, std::string ns, bool is_labeled, const std::vector> &map, - rclcpp::Publisher::SharedPtr pub) { + rclcpp::Publisher::SharedPtr pub, std::vector labels) { // Create global header std_msgs::msg::Header global_header = std_msgs::msg::Header(); @@ -271,6 +276,9 @@ void ObjectTrackingMap::publish_map( for (unsigned int i = 0; i < map.size(); i++) { all_seaing_interfaces::msg::Obstacle det_obstacle; map[i]->to_ros_msg(local_header, global_header, det_obstacle); + if(is_labeled){ + det_obstacle.label = labels[i]; + } map_msg.obstacles.push_back(det_obstacle); } map_msg.pose = m_nav_pose; diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp index 40cf6b83..cf2d3365 100644 --- 
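The best_dist / best_match / chosen_indices logic in the hunks above is a greedy nearest-centroid association, gated by class label and by the obstacle_seg_thresh parameter. A self-contained sketch of the same idea over flat vectors (match_detection is a hypothetical helper; the real code works directly on the ObjectCloud pointers):

#include <pcl/common/distances.h>  // pcl::euclideanDistance
#include <pcl/point_types.h>
#include <unordered_set>
#include <vector>

// For one detection, pick the nearest unclaimed track with the same label,
// but only if it is closer than the segmentation threshold.
int match_detection(const pcl::PointXYZ &det_centroid, int det_label,
                    const std::vector<pcl::PointXYZ> &track_centroids,
                    const std::vector<int> &track_labels,
                    const std::unordered_set<int> &claimed,
                    float seg_thresh) {
    float best_dist = seg_thresh;
    int best_match = -1;
    for (int i = 0; i < static_cast<int>(track_centroids.size()); i++) {
        if (claimed.count(i) || track_labels[i] != det_label)
            continue;
        float d = pcl::euclideanDistance(track_centroids[i], det_centroid);
        if (d < best_dist) {
            best_dist = d;
            best_match = i;
        }
    }
    return best_match;  // -1 means "no match": the caller starts a new track
}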
a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -29,6 +29,7 @@ #include #include #include +#include #include "pcl_conversions/pcl_conversions.h" #include "cv_bridge/cv_bridge.h" @@ -49,38 +50,45 @@ #include #include +//custom struct to also keep the points themselves with the obstacle (Obstacle doesn't do that and don't want to mess with it) +struct ObjectCloud{ + int id; + int label; + rclcpp::Time time_seen; + rclcpp::Time last_dead; + rclcpp::Duration time_dead = rclcpp::Duration(0,0); + bool is_dead; + pcl::PointCloud::Ptr local_pcloud_ptr; + pcl::PointCloud::Ptr global_pcloud_ptr; + pcl::PointXYZ local_centroid; + pcl::PointXYZ global_centroid; + + ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob); + + void update_loc_pcloud(pcl::PointCloud::Ptr loc); +}; + class ObjectTrackingMap : public rclcpp::Node{ private: void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg); void odom_callback(const nav_msgs::msg::Odometry &msg); void publish_map(std_msgs::msg::Header local_header, std::string ns, bool is_labeled, const std::vector> &map, - rclcpp::Publisher::SharedPtr pub); + rclcpp::Publisher::SharedPtr pub, std::vector labels); template T convert_to_global(double nav_x, double nav_y, double nav_heading, T point); template T convert_to_local(double nav_x, double nav_y, double nav_heading, T point); - //custom struct to also keep the points themselves with the obstacle (Obstacle doesn't do that and don't want to mess with it) - struct ObjectCloud{ - int id; - int label; - rclcpp::Time time_seen; - rclcpp::Time last_dead; - rclcpp::Duration time_dead; - bool is_dead; - pcl::PointCloud::Ptr local_pcloud_ptr; - pcl::PointCloud::Ptr global_pcloud_ptr; - pcl::PointXYZ local_centroid; - pcl::PointXYZ global_centroid; - - ObjectCloud(rclcpp::Time t, int l, pcl::PointCloud::Ptr loc, pcl::PointCloud::Ptr glob); - - void update_loc_pcloud(pcl::PointCloud::Ptr loc); - }; + // Get intrinsic camera model information needed for projection + void intrinsics_cb(const sensor_msgs::msg::CameraInfo &info_msg); + // Get transform from source frame to target frame + geometry_msgs::msg::TransformStamped get_tf(const std::string &in_target_frame, + const std::string &in_src_frame); + // Member variables - std::vector> m_tracked_obstacles; + std::vector> m_tracked_obstacles; std::string m_global_frame_id; int m_obstacle_id; double m_obstacle_seg_thresh; From 571f3ef5c530a5dbf8e9a376987da3b0724a4a39 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 24 Jan 2025 05:00:35 +0200 Subject: [PATCH 33/35] Finished debugging, seems to work great (obstacle map not that glitchy and pretty consistent with occlusions/missing objects for some time, especially when out of camera FoV, though not perfect, may need fine-tuning) --- all_seaing_bringup/rviz/dashboard.rviz | 26 ++++++------- .../object_tracking_map.cpp | 37 ++++++++++++------- .../launch/object_tracking_map_test.launch.py | 3 +- 3 files changed, 38 insertions(+), 28 deletions(-) diff --git a/all_seaing_bringup/rviz/dashboard.rviz b/all_seaing_bringup/rviz/dashboard.rviz index 52e768a6..360e884a 100644 --- a/all_seaing_bringup/rviz/dashboard.rviz +++ b/all_seaing_bringup/rviz/dashboard.rviz @@ -70,16 +70,16 @@ Visualization Manager: Enabled: true Name: ObstacleMap Namespaces: - untracked: true - untracked_edges: true - untracked_text: true - 
untracked_vertices: true + tracked: true + tracked_edges: true + tracked_text: true + tracked_vertices: true Topic: Depth: 5 Durability Policy: Volatile History Policy: Keep Last Reliability Policy: Reliable - Value: /obstacle_map/refined_untracked + Value: /obstacle_map/refined_tracked Value: true - Alpha: 1 Class: rviz_default_plugins/RobotModel @@ -327,7 +327,7 @@ Visualization Manager: Use rainbow: true Value: true - Class: rviz_default_plugins/Image - Enabled: true + Enabled: false Max Value: 1 Median window: 5 Min Value: 0 @@ -339,7 +339,7 @@ Visualization Manager: History Policy: Keep Last Reliability Policy: Reliable Value: /refined_object_contours_viz - Value: true + Value: false - Class: rviz_default_plugins/Image Enabled: true Max Value: 1 @@ -414,25 +414,25 @@ Visualization Manager: Views: Current: Class: rviz_default_plugins/Orbit - Distance: 33.20790481567383 + Distance: 59.75430679321289 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: 35.28513717651367 - Y: 22.616809844970703 - Z: 10.652386665344238 + X: 34.421382904052734 + Y: 16.84052276611328 + Z: 1.149369478225708 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 0.5603973269462585 + Pitch: 0.5253973603248596 Target Frame: Value: Orbit (rviz) - Yaw: 4.628566265106201 + Yaw: 4.643567085266113 Saved: ~ Window Geometry: Displays: diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 23e11099..8eb3beef 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -10,7 +10,7 @@ ObjectTrackingMap::ObjectTrackingMap() : Node("object_tracking_map"){ m_global_frame_id = this->get_parameter("global_frame_id").as_string(); m_obstacle_seg_thresh = this->get_parameter("obstacle_seg_thresh").as_double(); m_obstacle_drop_thresh = this->get_parameter("obstacle_drop_thresh").as_double(); - + RCLCPP_DEBUG(this->get_logger(), "OBSTACLE SEG THRESHOLD: %lf, DROP THRESHOLD: %lf", m_obstacle_seg_thresh, m_obstacle_drop_thresh); // Initialize navigation variables to 0 m_nav_x = 0; m_nav_y = 0; @@ -39,7 +39,8 @@ ObjectCloud::ObjectCloud(rclcpp::Time t, int l, pcl::PointCloudpoints){ + for(pcl::PointXYZHSV &global_pt : global_pcloud_ptr->points){ + // RCLCPP_DEBUG(this->get_logger(), "NEW POINT IN CREATED OBJECT POINT CLOUD: %lf, %lf, %lf", global_pt.x, global_pt.y, global_pt.z); global_centroid.x += global_pt.x/((double)global_pcloud_ptr->points.size()); global_centroid.y += global_pt.y/((double)global_pcloud_ptr->points.size()); global_centroid.z += global_pt.z/((double)global_pcloud_ptr->points.size()); @@ -120,7 +121,7 @@ T ObjectTrackingMap::convert_to_local(double nav_x, double nav_y, double nav_hea void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ if(msg->objects.size()==0) return; - + RCLCPP_DEBUG(this->get_logger(), "GOT DATA"); std::vector> detected_obstacles; for(all_seaing_interfaces::msg::LabeledObjectPointCloud obj : msg->objects){ pcl::PointCloud::Ptr local_obj_pcloud(new pcl::PointCloud); @@ -130,10 +131,11 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms pcl::PointXYZHSV global_pt = 
this->convert_to_global(m_nav_x, m_nav_y, m_nav_heading, pt); global_obj_pcloud->push_back(global_pt); } + // RCLCPP_DEBUG(this->get_logger(), "BEFORE OBSTACLE CREATION"); std::shared_ptr obj_cloud (new ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); detected_obstacles.push_back(obj_cloud); } - + // RCLCPP_DEBUG(this->get_logger(), "BEFORE UNTRACKED MAP PUBLISHING"); // Make and publish untracked map std::vector> untracked_obs; std::vector untracked_labels; @@ -155,7 +157,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms untracked_obs.push_back(untracked_ob); } this->publish_map(msg->objects[0].cloud.header, "untracked", true, untracked_obs, m_untracked_map_pub, untracked_labels); - + // RCLCPP_DEBUG(this->get_logger(), "AFTER UNTRACKED MAP PUBLISHING"); /* OBJECT TRACKING: --Like in obstacle_detector.cpp, for each detected obstacle, find the closest (RMS point distance? or just centroids), @@ -168,11 +170,13 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms --For those objects, update the time they have been undetected in the FoV total (add time from prev appearance, time_seen, if it was in FoV and undetected then too) --For unmatched obstacles that have been undetected in FoV for > some time threshold, remove them */ - + // RCLCPP_DEBUG(this->get_logger(), "BEFORE LIDAR->CAMERA TF"); // LIDAR -> Camera transform - if (!m_pc_cam_tf_ok) - m_pc_cam_tf = get_tf(msg->objects[0].segment.header.frame_id, msg->objects[0].cloud.header.frame_id); - + // if (!m_pc_cam_tf_ok) + // RCLCPP_DEBUG(this->get_logger(), "CAMERA FRAME ID: %s", msg->objects[0].segment.header.frame_id.c_str()); + // RCLCPP_DEBUG(this->get_logger(), "LIDAR FRAME ID: %s", msg->objects[0].cloud.header.frame_id.c_str()); + // m_pc_cam_tf = get_tf(msg->objects[0].segment.header.frame_id, msg->objects[0].cloud.header.frame_id); + // RCLCPP_DEBUG(this->get_logger(), "AFTER LIDAR->CAMERA TF"); // Match new obstacles with old ones, update them, and add new ones std::unordered_set chosen_indices; for(std::shared_ptr det_obs : detected_obstacles){ @@ -197,7 +201,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms m_tracked_obstacles.push_back(det_obs); } } - + RCLCPP_DEBUG(this->get_logger(), "AFTER NEW OBSTACLE MATCHING"); // Filter old obstacles for (int tracked_id = 0; tracked_id < m_tracked_obstacles.size(); tracked_id++) { if(chosen_indices.count(tracked_id)) continue; @@ -212,17 +216,21 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms lidar_point.x = m_tracked_obstacles[tracked_id]->local_centroid.x; lidar_point.y = m_tracked_obstacles[tracked_id]->local_centroid.y; lidar_point.z = m_tracked_obstacles[tracked_id]->local_centroid.z; - geometry_msgs::msg::Point camera_point; - tf2::doTransform(lidar_point, camera_point, m_pc_cam_tf); + geometry_msgs::msg::Point camera_point = lidar_point;//ALREADY IN THE SAME FRAME, WAS TRANSFORMED BEFORE BEING PUBLISHED BY bbox_project_pcloud.cpp + // tf2::doTransform(lidar_point, camera_point, m_pc_cam_tf); cv::Point2d xy_rect = m_cam_model.project3dToPixel( cv::Point3d(camera_point.y, camera_point.z, -camera_point.x)); + RCLCPP_DEBUG(this->get_logger(), "OBSTACLE ID %d (%lf, %lf, %lf)->(%lf, %lf)", m_tracked_obstacles[tracked_id]->id, lidar_point.x, lidar_point.y, lidar_point.z, xy_rect.x, xy_rect.y); if ((xy_rect.x >= 0) && (xy_rect.x < m_cam_model.cameraInfo().width) && (xy_rect.y >= 0) && (xy_rect.y < m_cam_model.cameraInfo().height) 
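The visibility test here reduces to projecting the tracked centroid through the pinhole model and checking the image bounds plus the forward (x >= 0) constraint. A sketch of that check as a standalone helper, mirroring the node's (y, z, -x) axis reordering for its Gazebo camera setup (in_camera_fov is an illustrative name):

#include <image_geometry/pinhole_camera_model.h>
#include <opencv2/core/types.hpp>

// True if a point given in the camera's frame projects inside the image and lies
// in front of the sensor. The camera model must already hold valid intrinsics
// (fromCameraInfo has been called).
bool in_camera_fov(const image_geometry::PinholeCameraModel &cam,
                   double x, double y, double z) {
    // Reorder the axes the same way the node does before projecting: (y, z, -x).
    cv::Point2d uv = cam.project3dToPixel(cv::Point3d(y, z, -x));
    return uv.x >= 0 && uv.x < cam.cameraInfo().width &&
           uv.y >= 0 && uv.y < cam.cameraInfo().height &&
           x >= 0;
}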
&& (lidar_point.x >= 0)) { // Dead if(m_tracked_obstacles[tracked_id]->is_dead){ // Was also dead before, add time dead - m_tracked_obstacles[tracked_id]->time_dead = rclcpp::Duration(msg->objects[0].time.sec, msg->objects[0].time.nanosec) - rclcpp::Duration(m_tracked_obstacles[tracked_id]->last_dead.seconds(), m_tracked_obstacles[tracked_id]->last_dead.nanoseconds()) + m_tracked_obstacles[tracked_id]->time_dead; + RCLCPP_DEBUG(this->get_logger(), "OBSTACLE ID %d TIME PERIOD FROM PREVIOUS DEAD: %lf - %lf", m_tracked_obstacles[tracked_id]->id, m_tracked_obstacles[tracked_id]->last_dead.seconds(), rclcpp::Time(msg->objects[0].time).seconds()); + m_tracked_obstacles[tracked_id]->time_dead = rclcpp::Time(msg->objects[0].time) - m_tracked_obstacles[tracked_id]->last_dead + m_tracked_obstacles[tracked_id]->time_dead; + RCLCPP_DEBUG(this->get_logger(), "OBSTACLE ID %d DEAD FOR %lf SECONDS, OBSTACLE DROP THRESHOLD: %lf", m_tracked_obstacles[tracked_id]->id, m_tracked_obstacles[tracked_id]->time_dead.seconds(), m_obstacle_drop_thresh); if(m_tracked_obstacles[tracked_id]->time_dead.seconds() > m_obstacle_drop_thresh){ + RCLCPP_DEBUG(this->get_logger(), "OBSTACLE ID %d DROPPED", m_tracked_obstacles[tracked_id]->id); m_tracked_obstacles.erase(m_tracked_obstacles.begin()+tracked_id); tracked_id--; continue; @@ -234,7 +242,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms m_tracked_obstacles[tracked_id]->is_dead = false; } } - + RCLCPP_DEBUG(this->get_logger(), "AFTER OLD OBSTACLE FILTERING"); // Publish map with tracked obstacles std::vector> tracked_obs; std::vector tracked_labels; @@ -256,6 +264,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms tracked_obs.push_back(tracked_ob); } this->publish_map(msg->objects[0].cloud.header, "tracked", true, tracked_obs, m_tracked_map_pub, tracked_labels); + RCLCPP_DEBUG(this->get_logger(), "AFTER TRACKED MAP PUBLISHING"); } void ObjectTrackingMap::publish_map( diff --git a/all_seaing_perception/launch/object_tracking_map_test.launch.py b/all_seaing_perception/launch/object_tracking_map_test.launch.py index d55cc279..1089de92 100644 --- a/all_seaing_perception/launch/object_tracking_map_test.launch.py +++ b/all_seaing_perception/launch/object_tracking_map_test.launch.py @@ -11,8 +11,9 @@ def generate_launch_description(): package="all_seaing_perception", executable="object_tracking_map", output="screen", + arguments=['--ros-args', '--log-level', 'debug'], remappings=[ - + ("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"), ], parameters=[ {"obstacle_seg_thresh": 10.0}, From 4d1d0c6d92bc08707eb147ac617c0abb15992610 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 31 Jan 2025 10:53:49 -0500 Subject: [PATCH 34/35] Removed redundancies and added explanation of function doing the tracking and map publishing --- .../object_tracking_map.cpp | 23 ++++++------------- .../object_tracking_map.hpp | 3 +++ 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp index 8eb3beef..835f23fa 100644 --- a/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp +++ b/all_seaing_perception/all_seaing_perception/object_tracking_map.cpp @@ -40,7 +40,6 @@ ObjectCloud::ObjectCloud(rclcpp::Time t, int l, pcl::PointCloudpoints){ - // RCLCPP_DEBUG(this->get_logger(), "NEW POINT IN CREATED OBJECT POINT CLOUD: %lf, %lf, 
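The dead-time bookkeeping above amounts to a small timer update per unmatched, in-FoV track: accumulate the gap since the last time the track was seen "dead", and drop it once the total passes obstacle_drop_thresh. A sketch with an illustrative TrackTimers struct (the real fields live on ObjectCloud):

#include "rclcpp/rclcpp.hpp"

struct TrackTimers {
    rclcpp::Time last_dead{0, 0, RCL_ROS_TIME};  // expects ROS time from message stamps
    rclcpp::Duration time_dead{0, 0};
    bool is_dead{false};
};

// Returns true if the track has been dead long enough to be erased.
bool update_dead_time(TrackTimers &t, const rclcpp::Time &now, double drop_thresh_s) {
    if (t.is_dead) {
        t.time_dead = (now - t.last_dead) + t.time_dead;  // add the gap since last check
        if (t.time_dead.seconds() > drop_thresh_s)
            return true;
    }
    t.is_dead = true;   // mark (or keep) dead and remember when we last saw it that way
    t.last_dead = now;
    return false;
}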
%lf", global_pt.x, global_pt.y, global_pt.z); global_centroid.x += global_pt.x/((double)global_pcloud_ptr->points.size()); global_centroid.y += global_pt.y/((double)global_pcloud_ptr->points.size()); global_centroid.z += global_pt.z/((double)global_pcloud_ptr->points.size()); @@ -121,7 +120,8 @@ T ObjectTrackingMap::convert_to_local(double nav_x, double nav_y, double nav_hea void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg){ if(msg->objects.size()==0) return; - RCLCPP_DEBUG(this->get_logger(), "GOT DATA"); + + //compute global coordinates from camera frame point cloud detections std::vector> detected_obstacles; for(all_seaing_interfaces::msg::LabeledObjectPointCloud obj : msg->objects){ pcl::PointCloud::Ptr local_obj_pcloud(new pcl::PointCloud); @@ -131,11 +131,10 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms pcl::PointXYZHSV global_pt = this->convert_to_global(m_nav_x, m_nav_y, m_nav_heading, pt); global_obj_pcloud->push_back(global_pt); } - // RCLCPP_DEBUG(this->get_logger(), "BEFORE OBSTACLE CREATION"); std::shared_ptr obj_cloud (new ObjectCloud(obj.time, obj.label, local_obj_pcloud, global_obj_pcloud)); detected_obstacles.push_back(obj_cloud); } - // RCLCPP_DEBUG(this->get_logger(), "BEFORE UNTRACKED MAP PUBLISHING"); + // Make and publish untracked map std::vector> untracked_obs; std::vector untracked_labels; @@ -157,7 +156,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms untracked_obs.push_back(untracked_ob); } this->publish_map(msg->objects[0].cloud.header, "untracked", true, untracked_obs, m_untracked_map_pub, untracked_labels); - // RCLCPP_DEBUG(this->get_logger(), "AFTER UNTRACKED MAP PUBLISHING"); + /* OBJECT TRACKING: --Like in obstacle_detector.cpp, for each detected obstacle, find the closest (RMS point distance? 
or just centroids), @@ -170,13 +169,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms --For those objects, update the time they have been undetected in the FoV total (add time from prev appearance, time_seen, if it was in FoV and undetected then too) --For unmatched obstacles that have been undetected in FoV for > some time threshold, remove them */ - // RCLCPP_DEBUG(this->get_logger(), "BEFORE LIDAR->CAMERA TF"); - // LIDAR -> Camera transform - // if (!m_pc_cam_tf_ok) - // RCLCPP_DEBUG(this->get_logger(), "CAMERA FRAME ID: %s", msg->objects[0].segment.header.frame_id.c_str()); - // RCLCPP_DEBUG(this->get_logger(), "LIDAR FRAME ID: %s", msg->objects[0].cloud.header.frame_id.c_str()); - // m_pc_cam_tf = get_tf(msg->objects[0].segment.header.frame_id, msg->objects[0].cloud.header.frame_id); - // RCLCPP_DEBUG(this->get_logger(), "AFTER LIDAR->CAMERA TF"); + // Match new obstacles with old ones, update them, and add new ones std::unordered_set chosen_indices; for(std::shared_ptr det_obs : detected_obstacles){ @@ -201,7 +194,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms m_tracked_obstacles.push_back(det_obs); } } - RCLCPP_DEBUG(this->get_logger(), "AFTER NEW OBSTACLE MATCHING"); + // Filter old obstacles for (int tracked_id = 0; tracked_id < m_tracked_obstacles.size(); tracked_id++) { if(chosen_indices.count(tracked_id)) continue; @@ -217,7 +210,6 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms lidar_point.y = m_tracked_obstacles[tracked_id]->local_centroid.y; lidar_point.z = m_tracked_obstacles[tracked_id]->local_centroid.z; geometry_msgs::msg::Point camera_point = lidar_point;//ALREADY IN THE SAME FRAME, WAS TRANSFORMED BEFORE BEING PUBLISHED BY bbox_project_pcloud.cpp - // tf2::doTransform(lidar_point, camera_point, m_pc_cam_tf); cv::Point2d xy_rect = m_cam_model.project3dToPixel( cv::Point3d(camera_point.y, camera_point.z, -camera_point.x)); RCLCPP_DEBUG(this->get_logger(), "OBSTACLE ID %d (%lf, %lf, %lf)->(%lf, %lf)", m_tracked_obstacles[tracked_id]->id, lidar_point.x, lidar_point.y, lidar_point.z, xy_rect.x, xy_rect.y); @@ -242,7 +234,7 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms m_tracked_obstacles[tracked_id]->is_dead = false; } } - RCLCPP_DEBUG(this->get_logger(), "AFTER OLD OBSTACLE FILTERING"); + // Publish map with tracked obstacles std::vector> tracked_obs; std::vector tracked_labels; @@ -264,7 +256,6 @@ void ObjectTrackingMap::object_track_map_publish(const all_seaing_interfaces::ms tracked_obs.push_back(tracked_ob); } this->publish_map(msg->objects[0].cloud.header, "tracked", true, tracked_obs, m_tracked_map_pub, tracked_labels); - RCLCPP_DEBUG(this->get_logger(), "AFTER TRACKED MAP PUBLISHING"); } void ObjectTrackingMap::publish_map( diff --git a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp index cf2d3365..eba40834 100644 --- a/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp +++ b/all_seaing_perception/include/all_seaing_perception/object_tracking_map.hpp @@ -70,7 +70,10 @@ struct ObjectCloud{ class ObjectTrackingMap : public rclcpp::Node{ private: + //Publishes the map with the untracked (but labeled, with color) obstacles, and performs euclidean distance (in the global frame) object tracking + //using the centroid of the object point clouds to publish the map with the tracked and labeled 
obstacles void object_track_map_publish(const all_seaing_interfaces::msg::LabeledObjectPointCloudArray::ConstSharedPtr &msg); + void odom_callback(const nav_msgs::msg::Odometry &msg); void publish_map(std_msgs::msg::Header local_header, std::string ns, bool is_labeled, const std::vector> &map, From c69317b9036922ebef359c3fbc1d0d92d1c07db6 Mon Sep 17 00:00:00 2001 From: Panagiotis Liampas Date: Fri, 31 Jan 2025 11:35:24 -0500 Subject: [PATCH 35/35] Added node to sim.launch.py --- all_seaing_bringup/launch/sim.launch.py | 16 ++++++++++++++++ .../launch/object_tracking_map_test.launch.py | 1 + 2 files changed, 17 insertions(+) diff --git a/all_seaing_bringup/launch/sim.launch.py b/all_seaing_bringup/launch/sim.launch.py index 276779eb..ac3eb4ac 100644 --- a/all_seaing_bringup/launch/sim.launch.py +++ b/all_seaing_bringup/launch/sim.launch.py @@ -191,6 +191,21 @@ def launch_setup(context, *args, **kwargs): ] ) + object_tracking_map_node = launch_ros.actions.Node( + package="all_seaing_perception", + executable="object_tracking_map", + output="screen", + arguments=['--ros-args', '--log-level', 'debug'], + remappings=[ + ("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"), + ], + parameters=[ + {"global_frame_id": "odom"}, + {"obstacle_seg_thresh": 10.0}, + {"obstacle_drop_thresh": 1.0}, + ] + ) + rviz_node = launch_ros.actions.Node( package="rviz2", executable="rviz2", @@ -287,6 +302,7 @@ def launch_setup(context, *args, **kwargs): obstacle_bbox_overlay_node, obstacle_bbox_visualizer_node, bbox_project_pcloud_node, + object_tracking_map_node, color_segmentation_node, point_cloud_filter_node, obstacle_detector_node, diff --git a/all_seaing_perception/launch/object_tracking_map_test.launch.py b/all_seaing_perception/launch/object_tracking_map_test.launch.py index 1089de92..e4f8aeca 100644 --- a/all_seaing_perception/launch/object_tracking_map_test.launch.py +++ b/all_seaing_perception/launch/object_tracking_map_test.launch.py @@ -16,6 +16,7 @@ def generate_launch_description(): ("camera_info_topic", "/wamv/sensors/cameras/front_left_camera_sensor/camera_info"), ], parameters=[ + {"global_frame_id": "odom"}, {"obstacle_seg_thresh": 10.0}, {"obstacle_drop_thresh": 1.0}, ]
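For completeness, this is roughly how the parameters wired up in sim.launch.py and object_tracking_map_test.launch.py are consumed on the node side; the defaults and the ParamSketch class are illustrative, the actual declarations live in the ObjectTrackingMap constructor:

#include <string>
#include "rclcpp/rclcpp.hpp"

class ParamSketch : public rclcpp::Node {
public:
    ParamSketch() : Node("object_tracking_map") {
        // Names match the launch files; default values here are only examples.
        this->declare_parameter<std::string>("global_frame_id", "odom");
        this->declare_parameter<double>("obstacle_seg_thresh", 10.0);
        this->declare_parameter<double>("obstacle_drop_thresh", 1.0);

        auto frame = this->get_parameter("global_frame_id").as_string();
        auto seg   = this->get_parameter("obstacle_seg_thresh").as_double();
        auto drop  = this->get_parameter("obstacle_drop_thresh").as_double();
        RCLCPP_INFO(this->get_logger(), "frame=%s seg=%.1f drop=%.1f",
                    frame.c_str(), seg, drop);
    }
};

int main(int argc, char *argv[]) {
    rclcpp::init(argc, argv);
    rclcpp::spin(std::make_shared<ParamSketch>());
    rclcpp::shutdown();
    return 0;
}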