Skip to content

Commit

Permalink
feat: use class label association between roi and object
Browse files Browse the repository at this point in the history
Signed-off-by: yukke42 <yukke42@users.noreply.github.com>
  • Loading branch information
yukke42 committed Aug 7, 2023
1 parent cedbc95 commit d2dffe6
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 24 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,13 @@
use_roi_probability: false
roi_probability_threshold: 0.5

can_assign_matrix:
# Label-compatibility matrix used when fusing 2D ROIs with 3D detected objects:
# 1 = an ROI with the column label may be associated with an object of the row
# label, 0 = the pair is never fused. Flattened row-major into an 8x8 matrix.
# Column order (label of roi_msg): UNKNOWN, CAR, TRUCK, BUS, TRAILER, MOTORBIKE, BICYCLE, PEDESTRIAN
[1, 0, 0, 0, 0, 0, 0, 0, # UNKNOWN <-detected_objects
0, 1, 1, 1, 1, 0, 0, 0, # CAR
0, 1, 1, 1, 1, 0, 0, 0, # TRUCK
0, 1, 1, 1, 1, 0, 0, 0, # BUS
0, 1, 1, 1, 1, 0, 0, 0, # TRAILER
0, 0, 0, 0, 0, 1, 1, 1, # MOTORBIKE
0, 0, 0, 0, 0, 1, 1, 1, # BICYCLE
0, 0, 0, 0, 0, 1, 1, 1] # PEDESTRIAN
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
#include "image_projection_based_fusion/fusion_node.hpp"
#include "tier4_autoware_utils/ros/debug_publisher.hpp"

#include "autoware_auto_perception_msgs/msg/object_classification.hpp"

#include <map>
#include <memory>
#include <vector>
Expand All @@ -40,14 +42,14 @@ class RoiDetectedObjectFusionNode : public FusionNode<DetectedObjects, DetectedO
const DetectedObjectsWithFeature & input_roi_msg,
const sensor_msgs::msg::CameraInfo & camera_info, DetectedObjects & output_object_msg) override;

std::map<std::size_t, RegionOfInterest> generateDetectedObjectRoIs(
std::map<std::size_t, DetectedObjectWithFeature> generateDetectedObjectRoIs(
const DetectedObjects & input_object_msg, const double image_width, const double image_height,
const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection);

void fuseObjectsOnImage(
const DetectedObjects & input_object_msg,
const std::vector<DetectedObjectWithFeature> & image_rois,
const std::map<std::size_t, sensor_msgs::msg::RegionOfInterest> & object_roi_map);
const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map);

void publish(const DetectedObjects & output_msg) override;

Expand All @@ -60,6 +62,7 @@ class RoiDetectedObjectFusionNode : public FusionNode<DetectedObjects, DetectedO
double min_iou_threshold{};
bool use_roi_probability{};
double roi_probability_threshold{};
Eigen::MatrixXi can_assign_matrix;
} fusion_params_;

std::map<int64_t, std::vector<bool>> passthrough_object_flags_map_, fused_object_flags_map_,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,14 @@ RoiDetectedObjectFusionNode::RoiDetectedObjectFusionNode(const rclcpp::NodeOptio
fusion_params_.min_iou_threshold = declare_parameter<double>("min_iou_threshold");
fusion_params_.use_roi_probability = declare_parameter<bool>("use_roi_probability");
fusion_params_.roi_probability_threshold = declare_parameter<double>("roi_probability_threshold");
{
const auto can_assign_vector_tmp = declare_parameter<std::vector<int64_t>>("can_assign_matrix");
std::vector<int> can_assign_vector(can_assign_vector_tmp.begin(), can_assign_vector_tmp.end());
const int label_num = static_cast<int>(std::sqrt(can_assign_vector.size()));
Eigen::Map<Eigen::MatrixXi> can_assign_matrix_tmp(
can_assign_vector.data(), label_num, label_num);
fusion_params_.can_assign_matrix = can_assign_matrix_tmp.transpose();
}
}

void RoiDetectedObjectFusionNode::preprocess(DetectedObjects & output_msg)
Expand Down Expand Up @@ -93,11 +101,12 @@ void RoiDetectedObjectFusionNode::fuseOnSingleImage(
}
}

std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
std::map<std::size_t, DetectedObjectWithFeature>
RoiDetectedObjectFusionNode::generateDetectedObjectRoIs(
const DetectedObjects & input_object_msg, const double image_width, const double image_height,
const Eigen::Affine3d & object2camera_affine, const Eigen::Matrix4d & camera_projection)
{
std::map<std::size_t, RegionOfInterest> object_roi_map;
std::map<std::size_t, DetectedObjectWithFeature> object_roi_map;
int64_t timestamp_nsec =
input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
if (passthrough_object_flags_map_.size() == 0) {
Expand All @@ -109,18 +118,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
const auto & passthrough_object_flags = passthrough_object_flags_map_.at(timestamp_nsec);
for (std::size_t obj_i = 0; obj_i < input_object_msg.objects.size(); ++obj_i) {
std::vector<Eigen::Vector3d> vertices_camera_coord;
{
const auto & object = input_object_msg.objects.at(obj_i);
const auto & object = input_object_msg.objects.at(obj_i);

if (passthrough_object_flags.at(obj_i)) {
continue;
}
if (passthrough_object_flags.at(obj_i)) {
continue;
}

// filter point out of scope
if (debugger_ && out_of_scope(object)) {
continue;
}
// filter point out of scope
if (debugger_ && out_of_scope(object)) {
continue;
}

{
std::vector<Eigen::Vector3d> vertices;
objectToVertices(object.kinematics.pose_with_covariance.pose, object.shape, vertices);
transformPoints(vertices, object2camera_affine, vertices_camera_coord);
Expand Down Expand Up @@ -157,7 +166,7 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
}
}
}
if (point_on_image_cnt == 3) {
if (point_on_image_cnt < 3) {
continue;
}

Expand All @@ -166,16 +175,18 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
max_x = std::min(max_x, image_width - 1);
max_y = std::min(max_y, image_height - 1);

// build roi
RegionOfInterest roi;
roi.x_offset = static_cast<std::uint32_t>(min_x);
roi.y_offset = static_cast<std::uint32_t>(min_y);
roi.width = static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
roi.height = static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
object_roi_map.insert(std::make_pair(obj_i, roi));
DetectedObjectWithFeature object_roi;
object_roi.feature.roi.x_offset = static_cast<std::uint32_t>(min_x);
object_roi.feature.roi.y_offset = static_cast<std::uint32_t>(min_y);
object_roi.feature.roi.width =
static_cast<std::uint32_t>(max_x) - static_cast<std::uint32_t>(min_x);
object_roi.feature.roi.height =
static_cast<std::uint32_t>(max_y) - static_cast<std::uint32_t>(min_y);
object_roi.object = object;
object_roi_map.insert(std::make_pair(obj_i, object_roi));

if (debugger_) {
debugger_->obstacle_rois_.push_back(roi);
debugger_->obstacle_rois_.push_back(object_roi.feature.roi);
}
}

Expand All @@ -185,7 +196,7 @@ std::map<std::size_t, RegionOfInterest> RoiDetectedObjectFusionNode::generateDet
void RoiDetectedObjectFusionNode::fuseObjectsOnImage(
const DetectedObjects & input_object_msg,
const std::vector<DetectedObjectWithFeature> & image_rois,
const std::map<std::size_t, sensor_msgs::msg::RegionOfInterest> & object_roi_map)
const std::map<std::size_t, DetectedObjectWithFeature> & object_roi_map)
{
int64_t timestamp_nsec =
input_object_msg.header.stamp.sec * (int64_t)1e9 + input_object_msg.header.stamp.nanosec;
Expand All @@ -210,7 +221,15 @@ void RoiDetectedObjectFusionNode::fuseObjectsOnImage(
float max_iou = 0.0f;
for (const auto & image_roi : image_rois) {
const auto & object_roi = object_pair.second;
const double iou = calcIoU(object_roi, image_roi.feature.roi);
const auto object_roi_label =
object_recognition_utils::getHighestProbLabel(object_roi.object.classification);
const auto image_roi_label =
object_recognition_utils::getHighestProbLabel(image_roi.object.classification);
if (!fusion_params_.can_assign_matrix(object_roi_label, image_roi_label)) {
continue;
}

const double iou = calcIoU(object_roi.feature.roi, image_roi.feature.roi);
if (iou > max_iou) {
max_iou = iou;
roi_prob = image_roi.object.existence_probability;
Expand Down

0 comments on commit d2dffe6

Please sign in to comment.