diff --git a/common/perception_utils/include/perception_utils/perception_utils.hpp b/common/perception_utils/include/perception_utils/perception_utils.hpp index 70171a95d19ea..90630dab9d6f2 100644 --- a/common/perception_utils/include/perception_utils/perception_utils.hpp +++ b/common/perception_utils/include/perception_utils/perception_utils.hpp @@ -29,7 +29,9 @@ #include #include +#include #include +#include #include #ifdef ROS_DISTRO_GALACTIC @@ -102,6 +104,72 @@ inline double get2dIoU(const T1 source_object, const T2 target_object) return iou; } +inline std::map getMaxMin2d(const tier4_autoware_utils::Polygon2d & polygon) +{ + const auto init_point = polygon.outer().at(0); + double max_x = init_point.x(); + double min_x = init_point.x(); + double max_y = init_point.y(); + double min_y = init_point.y(); + for (const auto & point : polygon.outer()) { + if (point.x() > max_x) max_x = point.x(); + if (min_x > point.x()) min_x = point.x(); + if (point.y() > max_y) max_y = point.y(); + if (min_y > point.y()) min_y = point.y(); + } + std::map max_min_map; + + max_min_map.insert(std::make_pair("max_x", max_x)); + max_min_map.insert(std::make_pair("min_x", min_x)); + max_min_map.insert(std::make_pair("max_y", max_y)); + max_min_map.insert(std::make_pair("min_y", min_y)); + return max_min_map; +} + +inline double getConvexShape( + const tier4_autoware_utils::Polygon2d & source_polygon, + const tier4_autoware_utils::Polygon2d & target_polygon) +{ + const auto source_max_min = getMaxMin2d(source_polygon); + const auto target_max_min = getMaxMin2d(target_polygon); + + const double highest_x = std::max(source_max_min.at("max_x"), target_max_min.at("max_x")); + const double lowest_x = std::min(source_max_min.at("min_x"), target_max_min.at("min_x")); + const double highest_y = std::max(source_max_min.at("max_y"), target_max_min.at("max_y")); + const double lowest_y = std::min(source_max_min.at("min_y"), target_max_min.at("min_y")); + + return std::abs(highest_x - lowest_x) * 
std::abs(highest_y - lowest_y); +} + +template +inline double get2dGeneralizedIoU(const T1 source_object, const T2 target_object) +{ + const auto & source_pose = getPose(source_object); + const auto & target_pose = getPose(target_object); + + const auto source_polygon = tier4_autoware_utils::toPolygon2d(source_pose, source_object.shape); + const auto target_polygon = tier4_autoware_utils::toPolygon2d(target_pose, target_object.shape); + + std::vector union_polygons; + std::vector intersection_polygons; + boost::geometry::union_(source_polygon, target_polygon, union_polygons); + boost::geometry::intersection(source_polygon, target_polygon, intersection_polygons); + + double intersection_area = 0.0; + double union_area = 0.0; + for (const auto & intersection_polygon : intersection_polygons) { + intersection_area += boost::geometry::area(intersection_polygon); + } + + for (const auto & union_polygon : union_polygons) { + union_area += boost::geometry::area(union_polygon); + } + + const double iou = union_area < 0.01 ? 0.0 : std::min(1.0, intersection_area / union_area); + const double convex_shape_area = getConvexShape(source_polygon, target_polygon); + return iou - (convex_shape_area - union_area) / convex_shape_area; +} + template inline double get2dPrecision(const T1 source_object, const T2 target_object) { diff --git a/common/perception_utils/test/src/test_perception_utils.cpp b/common/perception_utils/test/src/test_perception_utils.cpp index 889f92994b37a..eaab72d6fb500 100644 --- a/common/perception_utils/test/src/test_perception_utils.cpp +++ b/common/perception_utils/test/src/test_perception_utils.cpp @@ -375,6 +375,68 @@ TEST(perception_utils, test_get2dIoU) } } +TEST(perception_utils, test_get2dGeneralizedIoU) +{ + using perception_utils::get2dGeneralizedIoU; + // TODO(Shin-kyoto): + // get2dGeneralizedIoU uses outer points of each polygon. 
+ // But these points contain a sampling error of outer line, + // so get2dGeneralizedIoU includes an error of about 0.03. + // Therefore, in this test, epsilon is set to 0.04. + constexpr double epsilon_giou = 4 * 1e-02; + const double quart_circle = 0.16237976320958225; + + { // non overlapped + autoware_auto_perception_msgs::msg::DetectedObject source_obj; + source_obj.kinematics.pose_with_covariance.pose = createPose(1.5, 1.5, M_PI); + source_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX; + source_obj.shape.dimensions.x = 1.0; + source_obj.shape.dimensions.y = 1.0; + + autoware_auto_perception_msgs::msg::DetectedObject target_obj; + target_obj.kinematics.pose_with_covariance.pose = createPose(0.0, 0.0, M_PI_2); + target_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::CYLINDER; + target_obj.shape.dimensions.x = 1.0; + + const double giou = get2dGeneralizedIoU(source_obj, target_obj); + EXPECT_NEAR(giou, -1 + (1 + 4 * quart_circle) / (2.5 * 2.5), epsilon_giou); // not 0 + } + + { // partially overlapped + autoware_auto_perception_msgs::msg::DetectedObject source_obj; + source_obj.kinematics.pose_with_covariance.pose = createPose(0.5, 0.5, M_PI); + source_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX; + source_obj.shape.dimensions.x = 1.0; + source_obj.shape.dimensions.y = 1.0; + + autoware_auto_perception_msgs::msg::DetectedObject target_obj; + target_obj.kinematics.pose_with_covariance.pose = createPose(0.0, 0.0, M_PI_2); + target_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::CYLINDER; + target_obj.shape.dimensions.x = 1.0; + + const double giou = get2dGeneralizedIoU(source_obj, target_obj); + EXPECT_NEAR( + giou, quart_circle / (1.0 + quart_circle * 3) - 1.0 + (1.0 + 3 * quart_circle) / (1.5 * 1.5), + epsilon_giou); + } + + { // fully overlapped + autoware_auto_perception_msgs::msg::DetectedObject source_obj; + source_obj.kinematics.pose_with_covariance.pose = createPose(0.0, 0.0, 
M_PI); + source_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX; + source_obj.shape.dimensions.x = 1.0; + source_obj.shape.dimensions.y = 1.0; + + autoware_auto_perception_msgs::msg::DetectedObject target_obj; + target_obj.kinematics.pose_with_covariance.pose = createPose(0.0, 0.0, M_PI_2); + target_obj.shape.type = autoware_auto_perception_msgs::msg::Shape::CYLINDER; + target_obj.shape.dimensions.x = 1.0; + + const double giou = get2dGeneralizedIoU(source_obj, target_obj); + EXPECT_NEAR(giou, quart_circle * 4, epsilon_giou); // giou equals iou + } +} + TEST(perception_utils, test_get2dPrecision) { using perception_utils::get2dPrecision;