From 26939a6f36ba05035d40aec87010815c657d6bca Mon Sep 17 00:00:00 2001
From: Yusuke Muramatsu
Date: Wed, 9 Aug 2023 23:27:02 +0900
Subject: [PATCH] feat(image_projection_based_fusion): add objects filter by rois (#4546)

* tmp

Signed-off-by: yukke42

style(pre-commit): autofix

update

Signed-off-by: yukke42

style(pre-commit): autofix

* fix: fix association bug

Signed-off-by: yukke42

* feat: add prob_threshold for each class

Signed-off-by: yukke42

* feat: use class label association between roi and object

Signed-off-by: yukke42

* feat: add to tier4_perception_launch

Signed-off-by: yukke42

* chore: disable debug_mode

Signed-off-by: yukke42

* docs: update params

Signed-off-by: yukke42

* fix: apply suggestion

Signed-off-by: yukke42

* chore: update prob_thresholds of bicycle

Signed-off-by: yukke42

* feat: add thrust_distance for each class

Signed-off-by: yukke42

* docs: add thrust_distances

Signed-off-by: yukke42

* style(pre-commit): autofix

* chore: remove unnecessary variable

Signed-off-by: yukke42

* chore: rename to trust

Signed-off-by: yukke42

* style(pre-commit): autofix

* chore: add param

Signed-off-by: yukke42

* Update perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml

Co-authored-by: Shunsuke Miura <37187849+miursh@users.noreply.github.com>

---------

Signed-off-by: yukke42
Signed-off-by: yukke42
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Shunsuke Miura <37187849+miursh@users.noreply.github.com>
---
 ...ra_lidar_fusion_based_detection.launch.xml | 34 +++++++-
 .../roi_detected_object_fusion.param.yaml     | 19 ++++
 .../docs/roi-detected-object-fusion.md        | 18 ++--
 .../roi_detected_object_fusion/node.hpp       | 10 ++-
 .../roi_detected_object_fusion.launch.xml     |  7 +-
 .../image_projection_based_fusion/package.xml |  1 +
 .../src/roi_detected_object_fusion/node.cpp   | 86 ++++++++++++-------
 7 files changed, 129 insertions(+), 46 deletions(-)
 create mode 100755 perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml

diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml
index 2d983dc47545c..6e3883346b4a4 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml
@@ -224,12 +224,44 @@
 [33 added and 1 removed launch lines wiring roi_detected_object_fusion into the pipeline: the XML markup is not recoverable from this copy of the patch]

diff --git a/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml b/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml
new file mode 100755
index 0000000000000..bd49dc65749a9
--- /dev/null
+++ b/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml
@@ -0,0 +1,19 @@
+/**:
+  ros__parameters:
+    # UNKNOWN, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN
+    passthrough_lower_bound_probability_thresholds: [0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.50]
+    trust_distances: [50.0, 100.0, 100.0, 100.0, 100.0, 50.0, 50.0, 50.0]
+    min_iou_threshold: 0.5
+    use_roi_probability: false
+    roi_probability_threshold: 0.5
+
+    can_assign_matrix:
+      # UNKNOWN, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN <-roi_msg
+      [1, 0, 0, 0, 0, 0, 0, 0, # UNKNOWN <-detected_objects
+       0, 1, 1, 1, 1, 0, 0, 0, # CAR
+       0, 1, 1, 1, 1, 0, 0, 0, # TRUCK
+       0, 1, 1, 1, 1, 0, 0, 0, # BUS
+       0, 1, 1, 1, 1, 0, 0, 0, # TRAILER
+       0, 0, 0, 0, 0, 1, 1, 1, # MOTORBIKE
+       0, 0, 0, 0, 0, 1, 1, 1, # BICYCLE
+       0, 0, 0, 0, 0, 1, 1, 1] # PEDESTRIAN
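A note on reading the matrix above: the flattened `can_assign_matrix` is row-major, with the row index taken from the detected object's class and the column index from the ROI's class, in the label order given in the comment (UNKNOWN = 0 through PEDESTRIAN = 7). A minimal sketch of the lookup under that assumption; `kCanAssign` and `canAssign` are illustrative names, not identifiers from this patch:

```cpp
#include <array>
#include <cstddef>

// Class indices in the order used by the parameter comment above.
enum Label : std::size_t { UNKNOWN = 0, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN };

constexpr std::size_t kLabelNum = 8;

// The same flattened 8x8 matrix as in the YAML: row = detected object, column = ROI.
constexpr std::array<int, kLabelNum * kLabelNum> kCanAssign = {
  1, 0, 0, 0, 0, 0, 0, 0,  // UNKNOWN
  0, 1, 1, 1, 1, 0, 0, 0,  // CAR
  0, 1, 1, 1, 1, 0, 0, 0,  // TRUCK
  0, 1, 1, 1, 1, 0, 0, 0,  // BUS
  0, 1, 1, 1, 1, 0, 0, 0,  // TRAILER
  0, 0, 0, 0, 0, 1, 1, 1,  // MOTORBIKE
  0, 0, 0, 0, 0, 1, 1, 1,  // BICYCLE
  0, 0, 0, 0, 0, 1, 1, 1,  // PEDESTRIAN
};

// True when a detected object with label `obj` may be fused with an ROI of label `roi`.
constexpr bool canAssign(Label obj, Label roi)
{
  return kCanAssign[obj * kLabelNum + roi] != 0;
}

static_assert(canAssign(CAR, BUS), "vehicle classes may be matched with each other");
static_assert(!canAssign(CAR, PEDESTRIAN), "but never with pedestrian ROIs");
```

The vehicle block (CAR/TRUCK/BUS/TRAILER) and the vulnerable-road-user block (MOTORBIKE/BICYCLE/PEDESTRIAN) are each fully connected, reflecting that 2D detectors commonly confuse classes within those groups but rarely across them.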
diff --git a/perception/image_projection_based_fusion/docs/roi-detected-object-fusion.md b/perception/image_projection_based_fusion/docs/roi-detected-object-fusion.md
index accdc350bf256..4e054fc911dc6 100644
--- a/perception/image_projection_based_fusion/docs/roi-detected-object-fusion.md
+++ b/perception/image_projection_based_fusion/docs/roi-detected-object-fusion.md
@@ -41,14 +41,16 @@ The DetectedObject has three possible shape choices/implementations, where the p
 
 ### Core Parameters
 
-| Name                                             | Type   | Description                                                                                                                |
-| ------------------------------------------------ | ------ | -------------------------------------------------------------------------------------------------------------------------- |
-| `rois_number`                                    | int    | the number of input rois                                                                                                   |
-| `debug_mode`                                     | bool   | If set to `true`, the node subscribes to the image topic and publishes an image with debug drawings.                      |
-| `min_iou_threshold`                              | double | If the iou between detected objects and rois is greater than `min_iou_threshold`, the objects are classified as fused.    |
-| `passthrough_lower_bound_probability_threshold`  | double | If the `existence_probability` of a detected object is greater than the threshold, it is published in output.             |
-| `use_roi_probability`                            | float  | If set to `true`, the algorithm uses `existence_probability` of ROIs to match with the that of detected objects.          |
-| `roi_probability_threshold`                      | double | If the `existence_probability` of ROIs is greater than the threshold, matched detected objects are published in `output`. |
+| Name                                              | Type           | Description                                                                                                                             |
+| ------------------------------------------------- | -------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
+| `rois_number`                                     | int            | the number of input ROIs                                                                                                                |
+| `debug_mode`                                      | bool           | If set to `true`, the node subscribes to the image topic and publishes an image with debug drawings.                                   |
+| `passthrough_lower_bound_probability_thresholds`  | vector[double] | If the `existence_probability` of a detected object is greater than its class threshold, it is published in the output as-is.          |
+| `trust_distances`                                 | vector[double] | If the distance of a detected object from the origin of `frame_id` is greater than its class threshold, it is published in the output as-is. |
+| `min_iou_threshold`                               | double         | If the IoU between a detected object and an ROI is greater than `min_iou_threshold`, the object is classified as fused.                |
+| `use_roi_probability`                             | bool           | If set to `true`, the algorithm uses the `existence_probability` of ROIs to match with that of detected objects.                       |
+| `roi_probability_threshold`                       | double         | If the `existence_probability` of an ROI is greater than the threshold, matched detected objects are published in `output`.            |
+| `can_assign_matrix`                               | vector[int]    | association matrix between ROIs and detected objects; it defines which class labels of an ROI and a detected object can be matched     |
 
 ## Assumptions / Known limits
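To make the two per-class parameter vectors concrete: an object bypasses ROI confirmation entirely when its own probability is already above its class threshold, or when it lies beyond the distance at which camera ROIs are still considered reliable. A small sketch of that rule as the node applies it in `preprocess()`; the function name `isPassthrough` is illustrative:

```cpp
#include <iostream>

// Pass an object through (publish without ROI confirmation) if its probability
// exceeds the class threshold OR it is farther away than the class trust distance.
bool isPassthrough(double probability, double distance, double prob_threshold, double trust_distance)
{
  return probability > prob_threshold || distance > trust_distance;
}

int main()
{
  // Default PEDESTRIAN values from the config above: threshold 0.50, trust distance 50.0 m.
  std::cout << isPassthrough(0.4, 30.0, 0.50, 50.0) << "\n";  // 0: kept only if an image ROI confirms it
  std::cout << isPassthrough(0.4, 60.0, 0.50, 50.0) << "\n";  // 1: beyond camera trust range, kept as-is
}
```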
diff --git a/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
index fe95d21eb266c..e11a62c060480 100644
--- a/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
+++ b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
@@ -18,6 +18,8 @@
 #include "image_projection_based_fusion/fusion_node.hpp"
 #include "tier4_autoware_utils/ros/debug_publisher.hpp"
 
+#include "autoware_auto_perception_msgs/msg/object_classification.hpp"
+
 #include <map>
 #include <memory>
 #include <vector>
@@ -40,14 +42,14 @@ class RoiDetectedObjectFusionNode : public FusionNode
 
-  std::map<std::size_t, RegionOfInterest> generateDetectedObjectRoIs(
+  std::map<std::size_t, DetectedObjectWithFeature> generateDetectedObjectRoIs(
     const DetectedObjects & input_object_msg, const double image_width, const double image_height,
     const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection);
 
   void fuseObjectsOnImage(
     const DetectedObjects & input_object_msg, const std::vector<DetectedObjectWithFeature> & image_rois,
-    const std::map<std::size_t, RegionOfInterest> & object_roi_map);
+    const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map);
 
   void publish(const DetectedObjects & output_msg) override;
 
@@ -56,10 +58,12 @@ class RoiDetectedObjectFusionNode : public FusionNode
 
   struct
   {
-    double passthrough_lower_bound_probability_threshold{};
+    std::vector<double> passthrough_lower_bound_probability_thresholds{};
+    std::vector<double> trust_distances{};
     double min_iou_threshold{};
     bool use_roi_probability{};
     double roi_probability_threshold{};
+    Eigen::MatrixXi can_assign_matrix;
   } fusion_params_;
 
   std::map<int64_t, std::vector<bool>> passthrough_object_flags_map_, fused_object_flags_map_,
diff --git a/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml b/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml
index dd71ca462f666..b6165fc7c87d2 100644
--- a/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml
+++ b/perception/image_projection_based_fusion/launch/roi_detected_object_fusion.launch.xml
@@ -18,6 +18,7 @@
 [1 added launch line: the XML markup is not recoverable from this copy of the patch]
@@ -39,13 +40,9 @@
 [1 added and 5 removed launch lines: the XML markup is not recoverable from this copy of the patch]
diff --git a/perception/image_projection_based_fusion/package.xml b/perception/image_projection_based_fusion/package.xml
index 083a40044489b..32d7a5633b811 100644
--- a/perception/image_projection_based_fusion/package.xml
+++ b/perception/image_projection_based_fusion/package.xml
@@ -17,6 +17,7 @@
   <depend>image_transport</depend>
   <depend>lidar_centerpoint</depend>
   <depend>message_filters</depend>
+  <depend>object_recognition_utils</depend>
   <depend>pcl_conversions</depend>
   <depend>pcl_ros</depend>
   <depend>rclcpp</depend>
diff --git a/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp b/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
index 8961c0aad478f..333ec7d7ed206 100644
--- a/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
+++ b/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
@@ -14,6 +14,8 @@
 
 #include "image_projection_based_fusion/roi_detected_object_fusion/node.hpp"
 
+#include "object_recognition_utils/object_recognition_utils.hpp"
+
 #include <image_projection_based_fusion/utils/geometry.hpp>
 #include <image_projection_based_fusion/utils/utils.hpp>
@@ -25,11 +27,20 @@ namespace image_projection_based_fusion
 
 RoiDetectedObjectFusionNode::RoiDetectedObjectFusionNode(const rclcpp::NodeOptions & options)
 : FusionNode<DetectedObjects, DetectedObject>("roi_detected_object_fusion", options)
 {
-  fusion_params_.passthrough_lower_bound_probability_threshold =
-    declare_parameter<double>("passthrough_lower_bound_probability_threshold");
+  fusion_params_.passthrough_lower_bound_probability_thresholds =
+    declare_parameter<std::vector<double>>("passthrough_lower_bound_probability_thresholds");
+  fusion_params_.min_iou_threshold = declare_parameter<double>("min_iou_threshold");
+  fusion_params_.trust_distances = declare_parameter<std::vector<double>>("trust_distances");
   fusion_params_.use_roi_probability = declare_parameter<bool>("use_roi_probability");
   fusion_params_.roi_probability_threshold = declare_parameter<double>("roi_probability_threshold");
-  fusion_params_.min_iou_threshold = declare_parameter<double>("min_iou_threshold");
+  {
+    const auto can_assign_vector_tmp = declare_parameter<std::vector<int64_t>>("can_assign_matrix");
+    std::vector<int> can_assign_vector(can_assign_vector_tmp.begin(), can_assign_vector_tmp.end());
+    const int label_num = static_cast<int>(std::sqrt(can_assign_vector.size()));
+    Eigen::Map<Eigen::MatrixXi> can_assign_matrix_tmp(
+      can_assign_vector.data(), label_num, label_num);
+    fusion_params_.can_assign_matrix = can_assign_matrix_tmp.transpose();
+  }
 }
 
 void RoiDetectedObjectFusionNode::preprocess(DetectedObjects & output_msg)
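Why the constructor ends with `transpose()`: `Eigen::MatrixXi` is column-major by default, so `Eigen::Map` consumes the row-major parameter list column-by-column, and transposing afterwards restores the intended row/column meaning. For the symmetric default matrix this happens to be a no-op, but it matters for asymmetric configurations. A standalone demonstration:

```cpp
#include <Eigen/Core>

#include <iostream>
#include <vector>

int main()
{
  // A row-major flattened 2x2 matrix, as a YAML list would deliver it:
  // intended rows are {1, 2} and {3, 4}.
  std::vector<int> flat = {1, 2, 3, 4};

  // Eigen's default storage is column-major, so Map fills columns first:
  // m(0,0)=1, m(1,0)=2, m(0,1)=3, m(1,1)=4.
  Eigen::Map<Eigen::MatrixXi> m(flat.data(), 2, 2);
  std::cout << m << "\n\n";  // prints rows "1 3" and "2 4"

  // Transposing recovers the row-major reading: rows "1 2" and "3 4".
  Eigen::MatrixXi row_major = m.transpose();
  std::cout << row_major << "\n";
}
```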
@@ -39,9 +50,15 @@
   fused_object_flags.resize(output_msg.objects.size());
   ignored_object_flags.resize(output_msg.objects.size());
   for (std::size_t obj_i = 0; obj_i < output_msg.objects.size(); ++obj_i) {
-    if (
-      output_msg.objects.at(obj_i).existence_probability >
-      fusion_params_.passthrough_lower_bound_probability_threshold) {
+    const auto & object = output_msg.objects.at(obj_i);
+    const auto label = object_recognition_utils::getHighestProbLabel(object.classification);
+    const auto pos = object_recognition_utils::getPose(object).position;
+    const auto object_sqr_dist = pos.x * pos.x + pos.y * pos.y;
+    const auto prob_threshold =
+      fusion_params_.passthrough_lower_bound_probability_thresholds.at(label);
+    const auto trust_sqr_dist =
+      fusion_params_.trust_distances.at(label) * fusion_params_.trust_distances.at(label);
+    if (object.existence_probability > prob_threshold || object_sqr_dist > trust_sqr_dist) {
       passthrough_object_flags.at(obj_i) = true;
     }
   }
@@ -90,11 +107,12 @@
   }
 }
 
-std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
+std::map<std::size_t, DetectedObjectWithFeature>
+RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
   const DetectedObjects & input_object_msg, const double image_width, const double image_height,
   const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection)
 {
-  std::map<std::size_t, RegionOfInterest> object_roi_map;
+  std::map<std::size_t, DetectedObjectWithFeature> object_roi_map;
   int64_t timestamp_nsec =
     input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
   if (passthrough_object_flags_map_.size() == 0) {
@@ -106,18 +124,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
   const auto & passthrough_object_flags = passthrough_object_flags_map_.at(timestamp_nsec);
   for (std::size_t obj_i = 0; obj_i < input_object_msg.objects.size(); ++obj_i) {
     std::vector<Eigen::Vector3d> vertices_camera_coord;
-    {
-      const auto & object = input_object_msg.objects.at(obj_i);
+    const auto & object = input_object_msg.objects.at(obj_i);
 
-      if (passthrough_object_flags.at(obj_i)) {
-        continue;
-      }
+    if (passthrough_object_flags.at(obj_i)) {
+      continue;
+    }
 
-      // filter point out of scope
-      if (debugger_ && out_of_scope(object)) {
-        continue;
-      }
+    // filter point out of scope
+    if (debugger_ && out_of_scope(object)) {
+      continue;
+    }
 
+    {
       std::vector<Eigen::Vector3d> vertices;
       objectToVertices(object.kinematics.pose_with_covariance.pose, object.shape, vertices);
       transformPoints(vertices, object2camera_affine, vertices_camera_coord);
@@ -154,7 +172,7 @@
         }
       }
     }
-    if (point_on_image_cnt == 3) {
+    if (point_on_image_cnt < 3) {
      continue;
     }
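For context on the `point_on_image_cnt` fix above: the loop projects each vertex of the object's 3D box into the image and builds the ROI from their min/max rectangle, clamped to the image bounds. The old `== 3` comparison dropped objects with exactly three visible vertices while letting barely visible ones through; the corrected `< 3` requires at least three vertices inside the frame. A condensed sketch under those rules; `Point2d`, `Roi`, and `roiFromProjectedVertices` are illustrative stand-ins, not the node's types:

```cpp
#include <algorithm>
#include <optional>
#include <vector>

struct Point2d
{
  double x;
  double y;
};

struct Roi
{
  double x_offset;
  double y_offset;
  double width;
  double height;
};

// Build an axis-aligned ROI from a box's projected vertices: track the min/max
// rectangle, require at least three vertices inside the image (the corrected
// condition), then clamp the rectangle to the image bounds.
std::optional<Roi> roiFromProjectedVertices(
  const std::vector<Point2d> & vertices, double image_width, double image_height)
{
  double min_x = image_width, min_y = image_height, max_x = 0.0, max_y = 0.0;
  int point_on_image_cnt = 0;
  for (const auto & p : vertices) {
    min_x = std::min(min_x, p.x);
    min_y = std::min(min_y, p.y);
    max_x = std::max(max_x, p.x);
    max_y = std::max(max_y, p.y);
    if (0.0 <= p.x && p.x <= image_width - 1 && 0.0 <= p.y && p.y <= image_height - 1) {
      ++point_on_image_cnt;
    }
  }
  if (point_on_image_cnt < 3) {
    return std::nullopt;  // too little of the object is visible in this camera
  }
  min_x = std::max(min_x, 0.0);
  min_y = std::max(min_y, 0.0);
  max_x = std::min(max_x, image_width - 1);
  max_y = std::min(max_y, image_height - 1);
  return Roi{min_x, min_y, max_x - min_x, max_y - min_y};
}
```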
@@ -163,16 +181,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
     }
     min_x = std::max(min_x, 0.0);
     min_y = std::max(min_y, 0.0);
     max_x = std::min(max_x, image_width - 1);
     max_y = std::min(max_y, image_height - 1);
 
-    // build roi
-    RegionOfInterest roi;
-    roi.x_offset = static_cast<std::uint32_t>(min_x);
-    roi.y_offset = static_cast<std::uint32_t>(min_y);
-    roi.width = static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
-    roi.height = static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
-    object_roi_map.insert(std::make_pair(obj_i, roi));
+    DetectedObjectWithFeature object_roi;
+    object_roi.feature.roi.x_offset = static_cast<std::uint32_t>(min_x);
+    object_roi.feature.roi.y_offset = static_cast<std::uint32_t>(min_y);
+    object_roi.feature.roi.width =
+      static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
+    object_roi.feature.roi.height =
+      static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
+    object_roi.object = object;
+    object_roi_map.insert(std::make_pair(obj_i, object_roi));
 
     if (debugger_) {
-      debugger_->obstacle_rois_.push_back(roi);
+      debugger_->obstacle_rois_.push_back(object_roi.feature.roi);
     }
   }
@@ -182,7 +202,7 @@
 
 void RoiDetectedObjectFusionNode::fuseObjectsOnImage(
   const DetectedObjects & input_object_msg, const std::vector<DetectedObjectWithFeature> & image_rois,
-  const std::map<std::size_t, RegionOfInterest> & object_roi_map)
+  const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map)
 {
   int64_t timestamp_nsec =
     input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
@@ -207,7 +227,15 @@
     float max_iou = 0.0f;
     for (const auto & image_roi : image_rois) {
       const auto & object_roi = object_pair.second;
-      const double iou = calcIoU(object_roi, image_roi.feature.roi);
+      const auto object_roi_label =
+        object_recognition_utils::getHighestProbLabel(object_roi.object.classification);
+      const auto image_roi_label =
+        object_recognition_utils::getHighestProbLabel(image_roi.object.classification);
+      if (!fusion_params_.can_assign_matrix(object_roi_label, image_roi_label)) {
+        continue;
+      }
+
+      const double iou = calcIoU(object_roi.feature.roi, image_roi.feature.roi);
       if (iou > max_iou) {
         max_iou = iou;
         roi_prob = image_roi.object.existence_probability;
@@ -301,7 +329,7 @@ void RoiDetectedObjectFusionNode::publish(const DetectedObjects & output_msg)
 
   passthrough_object_flags_map_.erase(timestamp_nsec);
   fused_object_flags_map_.erase(timestamp_nsec);
-  fused_object_flags_map_.erase(timestamp_nsec);
+  ignored_object_flags_map_.erase(timestamp_nsec);
 }
 
 }  // namespace image_projection_based_fusion
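Taken together, the matching loop introduced in `fuseObjectsOnImage()` first gates candidate pairs by class compatibility and only then scores them by IoU, keeping the best-overlapping image ROI per projected object. A self-contained sketch of that logic; `Roi`, `Candidate`, `calcIoU`, and `bestIou` are illustrative stand-ins for the node's own types and helpers:

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

struct Roi
{
  double x, y, width, height;
};

struct Candidate
{
  Roi roi;
  std::uint8_t label;
};

// Stand-in for the node's calcIoU() helper: intersection-over-union of two boxes.
double calcIoU(const Roi & a, const Roi & b)
{
  const double x1 = std::max(a.x, b.x);
  const double y1 = std::max(a.y, b.y);
  const double x2 = std::min(a.x + a.width, b.x + b.width);
  const double y2 = std::min(a.y + a.height, b.y + b.height);
  const double inter = std::max(0.0, x2 - x1) * std::max(0.0, y2 - y1);
  const double uni = a.width * a.height + b.width * b.height - inter;
  return uni > 0.0 ? inter / uni : 0.0;
}

// Mirror of the matching loop: skip label pairs forbidden by can_assign_matrix,
// then keep the highest IoU among the remaining image ROIs.
double bestIou(
  const Candidate & object_roi, const std::vector<Candidate> & image_rois,
  const std::vector<std::vector<bool>> & can_assign)
{
  double max_iou = 0.0;
  for (const auto & image_roi : image_rois) {
    if (!can_assign[object_roi.label][image_roi.label]) {
      continue;  // class-incompatible pair, e.g. a CAR object against a PEDESTRIAN ROI
    }
    max_iou = std::max(max_iou, calcIoU(object_roi.roi, image_roi.roi));
  }
  return max_iou;
}
```

In the node itself, the winning IoU is then compared against `min_iou_threshold` (or, with `use_roi_probability`, the ROI's `existence_probability` against `roi_probability_threshold`) to decide whether the object counts as fused.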