diff --git a/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml b/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml
index dacbabf33f7fb..8e2f3f0baed20 100755
--- a/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml
+++ b/perception/image_projection_based_fusion/config/roi_detected_object_fusion.param.yaml
@@ -6,3 +6,13 @@
     use_roi_probability: false
     roi_probability_threshold: 0.5
+    can_assign_matrix:
+      #UNKNOWN, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN <-roi_msg
+      [1, 0, 0, 0, 0, 0, 0, 0, # UNKNOWN <-detected_objects
+       0, 1, 1, 1, 1, 0, 0, 0, # CAR
+       0, 1, 1, 1, 1, 0, 0, 0, # TRUCK
+       0, 1, 1, 1, 1, 0, 0, 0, # BUS
+       0, 1, 1, 1, 1, 0, 0, 0, # TRAILER
+       0, 0, 0, 0, 0, 1, 1, 1, # MOTORBIKE
+       0, 0, 0, 0, 0, 1, 1, 1, # BICYCLE
+       0, 0, 0, 0, 0, 1, 1, 1] # PEDESTRIAN
diff --git a/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
index 19b63ffb4d0d7..92343e033162f 100644
--- a/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
+++ b/perception/image_projection_based_fusion/include/image_projection_based_fusion/roi_detected_object_fusion/node.hpp
@@ -18,6 +18,8 @@
 #include "image_projection_based_fusion/fusion_node.hpp"
 #include "tier4_autoware_utils/ros/debug_publisher.hpp"
 
+#include "autoware_auto_perception_msgs/msg/object_classification.hpp"
+
 #include <map>
 #include <memory>
 #include <vector>
@@ -40,14 +42,14 @@ class RoiDetectedObjectFusionNode : public FusionNode<DetectedObjects, DetectedO
-  std::map<std::size_t, RegionOfInterest> generateDetectedObjectRoIs(
+  std::map<std::size_t, DetectedObjectWithFeature> generateDetectedObjectRoIs(
     const DetectedObjects & input_object_msg, const double image_width, const double image_height,
     const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection);
 
   void fuseObjectsOnImage(
     const DetectedObjects & input_object_msg,
     const std::vector<DetectedObjectWithFeature> & image_rois,
-    const std::map<std::size_t, RegionOfInterest> & object_roi_map);
+    const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map);
 
   void publish(const DetectedObjects & output_msg) override;
@@ -60,6 +62,7 @@ class RoiDetectedObjectFusionNode : public FusionNode<DetectedObjects, DetectedO
+    Eigen::MatrixXi can_assign_matrix;
   } fusion_params_;
 
   std::map<int64_t, std::vector<bool>> passthrough_object_flags_map_, fused_object_flags_map_,
diff --git a/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp b/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
index eadf4e0b3851c..0ad46fb0da585 100644
--- a/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
+++ b/perception/image_projection_based_fusion/src/roi_detected_object_fusion/node.cpp
@@ -32,6 +32,14 @@ RoiDetectedObjectFusionNode::RoiDetectedObjectFusionNode(const rclcpp::NodeOptio
   fusion_params_.min_iou_threshold = declare_parameter<double>("min_iou_threshold");
   fusion_params_.use_roi_probability = declare_parameter<bool>("use_roi_probability");
   fusion_params_.roi_probability_threshold = declare_parameter<double>("roi_probability_threshold");
+  {
+    const auto can_assign_vector_tmp = declare_parameter<std::vector<int64_t>>("can_assign_matrix");
+    std::vector<int> can_assign_vector(can_assign_vector_tmp.begin(), can_assign_vector_tmp.end());
+    const int label_num = static_cast<int>(std::sqrt(can_assign_vector.size()));
+    Eigen::Map<Eigen::MatrixXi> can_assign_matrix_tmp(
+      can_assign_vector.data(), label_num, label_num);
+    fusion_params_.can_assign_matrix = can_assign_matrix_tmp.transpose();
+  }
 }
 
 void RoiDetectedObjectFusionNode::preprocess(DetectedObjects & output_msg)
@@ -93,11 +101,12 @@ void RoiDetectedObjectFusionNode::fuseOnSingleImage(
   }
 }
 
-std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
+std::map<std::size_t, DetectedObjectWithFeature>
+RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
   const DetectedObjects & input_object_msg, const double image_width, const double image_height,
   const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection)
 {
-  std::map<std::size_t, RegionOfInterest> object_roi_map;
+  std::map<std::size_t, DetectedObjectWithFeature> object_roi_map;
   int64_t timestamp_nsec =
     input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
   if (passthrough_object_flags_map_.size() == 0) {
@@ -109,18 +118,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
   const auto & passthrough_object_flags = passthrough_object_flags_map_.at(timestamp_nsec);
   for (std::size_t obj_i = 0; obj_i < input_object_msg.objects.size(); ++obj_i) {
     std::vector<Eigen::Vector3d> vertices_camera_coord;
-    {
-      const auto & object = input_object_msg.objects.at(obj_i);
+    const auto & object = input_object_msg.objects.at(obj_i);
 
-      if (passthrough_object_flags.at(obj_i)) {
-        continue;
-      }
+    if (passthrough_object_flags.at(obj_i)) {
+      continue;
+    }
 
-      // filter point out of scope
-      if (debugger_ && out_of_scope(object)) {
-        continue;
-      }
+    // filter point out of scope
+    if (debugger_ && out_of_scope(object)) {
+      continue;
+    }
 
+    {
       std::vector<Eigen::Vector3d> vertices;
       objectToVertices(object.kinematics.pose_with_covariance.pose, object.shape, vertices);
       transformPoints(vertices, object2camera_affine, vertices_camera_coord);
@@ -157,7 +166,7 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
         }
       }
     }
 
-    if (point_on_image_cnt == 3) {
+    if (point_on_image_cnt < 3) {
       continue;
     }
@@ -166,16 +175,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
     max_x = std::min(max_x, image_width - 1);
     max_y = std::min(max_y, image_height - 1);
 
-    // build roi
-    RegionOfInterest roi;
-    roi.x_offset = static_cast<std::uint32_t>(min_x);
-    roi.y_offset = static_cast<std::uint32_t>(min_y);
-    roi.width = static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
-    roi.height = static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
-    object_roi_map.insert(std::make_pair(obj_i, roi));
+    DetectedObjectWithFeature object_roi;
+    object_roi.feature.roi.x_offset = static_cast<std::uint32_t>(min_x);
+    object_roi.feature.roi.y_offset = static_cast<std::uint32_t>(min_y);
+    object_roi.feature.roi.width =
+      static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
+    object_roi.feature.roi.height =
+      static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
+    object_roi.object = object;
+    object_roi_map.insert(std::make_pair(obj_i, object_roi));
 
     if (debugger_) {
-      debugger_->obstacle_rois_.push_back(roi);
+      debugger_->obstacle_rois_.push_back(object_roi.feature.roi);
     }
@@ -185,7 +196,7 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
 void RoiDetectedObjectFusionNode::fuseObjectsOnImage(
   const DetectedObjects & input_object_msg,
   const std::vector<DetectedObjectWithFeature> & image_rois,
-  const std::map<std::size_t, RegionOfInterest> & object_roi_map)
+  const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map)
 {
   int64_t timestamp_nsec =
     input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
@@ -210,7 +221,15 @@ void RoiDetectedObjectFusionNode::fuseObjectsOnImage(
     float max_iou = 0.0f;
     for (const auto & image_roi : image_rois) {
       const auto & object_roi = object_pair.second;
-      const double iou = calcIoU(object_roi, image_roi.feature.roi);
+      const auto object_roi_label =
+        object_recognition_utils::getHighestProbLabel(object_roi.object.classification);
+      const auto image_roi_label =
+        object_recognition_utils::getHighestProbLabel(image_roi.object.classification);
+      if (!fusion_params_.can_assign_matrix(object_roi_label, image_roi_label)) {
+        continue;
+      }
+
+      const double iou = calcIoU(object_roi.feature.roi, image_roi.feature.roi);
       if (iou > max_iou) {
         max_iou = iou;
         roi_prob = image_roi.object.existence_probability;
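For reference, below is a minimal standalone sketch (not part of the patch) of how the flattened `can_assign_matrix` parameter is interpreted: the row-major list from the YAML is mapped into an Eigen matrix and transposed, then queried as `(detected_objects label, roi label)` the way the fusion loop does. The numeric label indices (UNKNOWN=0 ... PEDESTRIAN=7) are assumed here to match the order of the YAML comment; the real node obtains them from `autoware_auto_perception_msgs::msg::ObjectClassification` via `object_recognition_utils::getHighestProbLabel`.

```cpp
#include <Eigen/Core>

#include <cmath>
#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
  // Same row-major layout as the YAML: rows = detected_objects label, columns = roi label.
  const std::vector<std::int64_t> param = {
    1, 0, 0, 0, 0, 0, 0, 0,  // UNKNOWN
    0, 1, 1, 1, 1, 0, 0, 0,  // CAR
    0, 1, 1, 1, 1, 0, 0, 0,  // TRUCK
    0, 1, 1, 1, 1, 0, 0, 0,  // BUS
    0, 1, 1, 1, 1, 0, 0, 0,  // TRAILER
    0, 0, 0, 0, 0, 1, 1, 1,  // MOTORBIKE
    0, 0, 0, 0, 0, 1, 1, 1,  // BICYCLE
    0, 0, 0, 0, 0, 1, 1, 1,  // PEDESTRIAN
  };

  std::vector<int> values(param.begin(), param.end());
  const int label_num = static_cast<int>(std::sqrt(values.size()));

  // Eigen::Map is column-major by default, so transposing restores the
  // row-major layout written in the YAML (same trick as the node constructor).
  Eigen::Map<Eigen::MatrixXi> tmp(values.data(), label_num, label_num);
  const Eigen::MatrixXi can_assign_matrix = tmp.transpose();

  const int car = 1, pedestrian = 7;  // assumed label indices per the YAML comment order
  std::cout << "CAR object, CAR roi: " << can_assign_matrix(car, car) << "\n";              // 1
  std::cout << "CAR object, PEDESTRIAN roi: " << can_assign_matrix(car, pedestrian) << "\n";  // 0
  return 0;
}
```

With this gate in place, an image ROI only contributes to `max_iou` for a detected object when the matrix entry for the two highest-probability labels is non-zero, so e.g. a pedestrian ROI can no longer raise the fused probability of a CAR-class object.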