Skip to content

Commit

Permalink
refactor(detected_object_validation): use common utils (#1410)
Browse files Browse the repository at this point in the history
Signed-off-by: Takayuki Murooka <takayuki5168@gmail.com>
  • Loading branch information
takayuki5168 authored Jul 25, 2022
1 parent 2706fd8 commit 9eb890a
Show file tree
Hide file tree
Showing 5 changed files with 7 additions and 151 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,6 @@ class ObjectLaneletFilterNode : public rclcpp::Node

Filter_target_label filter_target_;

bool transformDetectedObjects(
const autoware_auto_perception_msgs::msg::DetectedObjects &, const std::string &,
const tf2_ros::Buffer &, autoware_auto_perception_msgs::msg::DetectedObjects &);
LinearRing2d getConvexHull(const autoware_auto_perception_msgs::msg::DetectedObjects &);
lanelet::ConstLanelets getIntersectedLanelets(
const LinearRing2d &, const lanelet::ConstLanelets &);
Expand Down
1 change: 1 addition & 0 deletions perception/detected_object_validation/package.xml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
<depend>message_filters</depend>
<depend>nav_msgs</depend>
<depend>pcl_conversions</depend>
<depend>perception_utils</depend>
<depend>rclcpp</depend>
<depend>rclcpp_components</depend>
<depend>tf2</depend>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

#include "detected_object_filter/object_lanelet_filter.hpp"

#include <perception_utils/perception_utils.hpp>
#include <tier4_autoware_utils/tier4_autoware_utils.hpp>

#include <boost/geometry/algorithms/convex_hull.hpp>
Expand All @@ -22,21 +23,6 @@

#include <lanelet2_core/geometry/Polygon.h>

/// @brief Look up the transform from source_frame_id to target_frame_id at the given time.
/// @param tf_buffer TF2 buffer used for the lookup.
/// @param source_frame_id Frame the data is currently expressed in.
/// @param target_frame_id Frame to transform into.
/// @param time Stamp at which the transform is requested.
/// @return The transform on success, boost::none when the lookup fails or times out.
boost::optional<geometry_msgs::msg::Transform> getTransform(
  const tf2_ros::Buffer & tf_buffer, const std::string & source_frame_id,
  const std::string & target_frame_id, const rclcpp::Time & time)
{
  try {
    // Block for up to 0.5 s waiting for the transform to become available.
    const auto stamped = tf_buffer.lookupTransform(
      /*target*/ target_frame_id, /*src*/ source_frame_id, time,
      rclcpp::Duration::from_seconds(0.5));
    return stamped.transform;
  } catch (tf2::TransformException & ex) {
    RCLCPP_WARN_STREAM(rclcpp::get_logger("object_lanelet_filter"), ex.what());
    return boost::none;
  }
}
namespace object_lanelet_filter
{
ObjectLaneletFilterNode::ObjectLaneletFilterNode(const rclcpp::NodeOptions & node_options)
Expand Down Expand Up @@ -91,7 +77,7 @@ void ObjectLaneletFilterNode::objectCallback(
return;
}
autoware_auto_perception_msgs::msg::DetectedObjects transformed_objects;
if (!transformDetectedObjects(*input_msg, "map", tf_buffer_, transformed_objects)) {
if (!perception_utils::transformObjects(*input_msg, "map", tf_buffer_, transformed_objects)) {
RCLCPP_ERROR(get_logger(), "Failed transform to map.");
return;
}
Expand Down Expand Up @@ -131,36 +117,6 @@ void ObjectLaneletFilterNode::objectCallback(
object_pub_->publish(output_object_msg);
}

/// @brief Transform every object pose in input_msg into target_frame_id.
/// @param input_msg Detected objects in their original frame.
/// @param target_frame_id Frame to express the output objects in.
/// @param tf_buffer TF2 buffer used to resolve the frame transform.
/// @param[out] output_msg Copy of input_msg with poses re-expressed in target_frame_id.
/// @return true on success, false when the required transform could not be obtained.
bool ObjectLaneletFilterNode::transformDetectedObjects(
  const autoware_auto_perception_msgs::msg::DetectedObjects & input_msg,
  const std::string & target_frame_id, const tf2_ros::Buffer & tf_buffer,
  autoware_auto_perception_msgs::msg::DetectedObjects & output_msg)
{
  output_msg = input_msg;

  // Nothing to do when the objects are already expressed in the target frame.
  if (input_msg.header.frame_id == target_frame_id) {
    return true;
  }
  output_msg.header.frame_id = target_frame_id;

  const auto transform_msg =
    getTransform(tf_buffer, input_msg.header.frame_id, target_frame_id, input_msg.header.stamp);
  if (!transform_msg) {
    return false;
  }
  tf2::Transform tf_target_from_world;
  tf2::fromMsg(*transform_msg, tf_target_from_world);

  // Re-express each object pose: target <- world <- object.
  for (auto & object : output_msg.objects) {
    tf2::Transform tf_world_from_object;
    tf2::fromMsg(object.kinematics.pose_with_covariance.pose, tf_world_from_object);
    const tf2::Transform tf_target_from_object = tf_target_from_world * tf_world_from_object;
    tf2::toMsg(tf_target_from_object, object.kinematics.pose_with_covariance.pose);
  }
  return true;
}

LinearRing2d ObjectLaneletFilterNode::getConvexHull(
const autoware_auto_perception_msgs::msg::DetectedObjects & detected_objects)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

#include "obstacle_pointcloud_based_validator/obstacle_pointcloud_based_validator.hpp"

#include <perception_utils/perception_utils.hpp>
#include <tier4_autoware_utils/tier4_autoware_utils.hpp>

#include <pcl/filters/crop_hull.h>
Expand All @@ -34,54 +35,6 @@

namespace
{
/// @brief Look up the transform from source_frame_id to target_frame_id at the given time.
/// @param tf_buffer TF2 buffer used for the lookup.
/// @param source_frame_id Frame the data is currently expressed in.
/// @param target_frame_id Frame to transform into.
/// @param time Stamp at which the transform is requested.
/// @return The transform on success, std::nullopt when the lookup fails or times out.
std::optional<geometry_msgs::msg::Transform> getTransform(
  const tf2_ros::Buffer & tf_buffer, const std::string & source_frame_id,
  const std::string & target_frame_id, const rclcpp::Time & time)
{
  try {
    // Block for up to 0.5 s waiting for the transform to become available.
    const auto stamped = tf_buffer.lookupTransform(
      /*target*/ target_frame_id, /*src*/ source_frame_id, time,
      rclcpp::Duration::from_seconds(0.5));
    return stamped.transform;
  } catch (tf2::TransformException & ex) {
    RCLCPP_WARN_STREAM(rclcpp::get_logger("obstacle_pointcloud_based_validator"), ex.what());
    return std::nullopt;
  }
}

/// @brief Transform every object pose in input_msg into target_frame_id.
/// @param input_msg Detected objects in their original frame.
/// @param target_frame_id Frame to express the output objects in.
/// @param tf_buffer TF2 buffer used to resolve the frame transform.
/// @param[out] output_msg Copy of input_msg with poses re-expressed in target_frame_id.
/// @return true on success, false when the required transform could not be obtained.
bool transformDetectedObjects(
  const autoware_auto_perception_msgs::msg::DetectedObjects & input_msg,
  const std::string & target_frame_id, const tf2_ros::Buffer & tf_buffer,
  autoware_auto_perception_msgs::msg::DetectedObjects & output_msg)
{
  output_msg = input_msg;

  // Nothing to do when the objects are already expressed in the target frame.
  if (input_msg.header.frame_id == target_frame_id) {
    return true;
  }
  output_msg.header.frame_id = target_frame_id;

  const auto transform_msg =
    getTransform(tf_buffer, input_msg.header.frame_id, target_frame_id, input_msg.header.stamp);
  if (!transform_msg) {
    return false;
  }
  tf2::Transform tf_target_from_world;
  tf2::fromMsg(*transform_msg, tf_target_from_world);

  // Re-express each object pose: target <- world <- object.
  for (auto & object : output_msg.objects) {
    tf2::Transform tf_world_from_object;
    tf2::fromMsg(object.kinematics.pose_with_covariance.pose, tf_world_from_object);
    const tf2::Transform tf_target_from_object = tf_target_from_world * tf_world_from_object;
    tf2::toMsg(tf_target_from_object, object.kinematics.pose_with_covariance.pose);
    // Note: Covariance is not transformed.
  }
  return true;
}

inline pcl::PointXY toPCL(const double x, const double y)
{
pcl::PointXY pcl_point;
Expand Down Expand Up @@ -152,7 +105,7 @@ void ObstaclePointCloudBasedValidator::onObjectsAndObstaclePointCloud(

// Transform to pointcloud frame
autoware_auto_perception_msgs::msg::DetectedObjects transformed_objects;
if (!transformDetectedObjects(
if (!perception_utils::transformObjects(
*input_objects, input_obstacle_pointcloud->header.frame_id, tf_buffer_,
transformed_objects)) {
// objects_pub_->publish(*input_objects);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

#include "occupancy_grid_based_validator/occupancy_grid_based_validator.hpp"

#include <perception_utils/perception_utils.hpp>
#include <tier4_autoware_utils/tier4_autoware_utils.hpp>

#include <boost/optional.hpp>
Expand All @@ -28,58 +29,6 @@
#include <Eigen/Core>
#include <Eigen/Geometry>

namespace
{
/// @brief Look up the transform from source_frame_id to target_frame_id at the given time.
/// @param tf_buffer TF2 buffer used for the lookup.
/// @param source_frame_id Frame the data is currently expressed in.
/// @param target_frame_id Frame to transform into.
/// @param time Stamp at which the transform is requested.
/// @return The transform on success, boost::none when the lookup fails or times out.
boost::optional<geometry_msgs::msg::Transform> getTransform(
  const tf2_ros::Buffer & tf_buffer, const std::string & source_frame_id,
  const std::string & target_frame_id, const rclcpp::Time & time)
{
  try {
    // Block for up to 0.5 s waiting for the transform to become available.
    geometry_msgs::msg::TransformStamped self_transform_stamped;
    self_transform_stamped = tf_buffer.lookupTransform(
      /*target*/ target_frame_id, /*src*/ source_frame_id, time,
      rclcpp::Duration::from_seconds(0.5));
    return self_transform_stamped.transform;
  } catch (tf2::TransformException & ex) {
    // Bug fix: this helper lives in occupancy_grid_based_validator.cpp but logged under
    // the "multi_object_tracker" logger (copy-paste error), misattributing warnings to
    // an unrelated node. Log under this node's own logger instead.
    RCLCPP_WARN_STREAM(rclcpp::get_logger("occupancy_grid_based_validator"), ex.what());
    return boost::none;
  }
}

/// @brief Transform every object pose in input_msg into target_frame_id.
/// @param input_msg Detected objects in their original frame.
/// @param target_frame_id Frame to express the output objects in.
/// @param tf_buffer TF2 buffer used to resolve the frame transform.
/// @param[out] output_msg Copy of input_msg with poses re-expressed in target_frame_id.
/// @return true on success, false when the required transform could not be obtained.
bool transformDetectedObjects(
  const autoware_auto_perception_msgs::msg::DetectedObjects & input_msg,
  const std::string & target_frame_id, const tf2_ros::Buffer & tf_buffer,
  autoware_auto_perception_msgs::msg::DetectedObjects & output_msg)
{
  output_msg = input_msg;

  // Nothing to do when the objects are already expressed in the target frame.
  if (input_msg.header.frame_id == target_frame_id) {
    return true;
  }
  output_msg.header.frame_id = target_frame_id;

  const auto transform_msg =
    getTransform(tf_buffer, input_msg.header.frame_id, target_frame_id, input_msg.header.stamp);
  if (!transform_msg) {
    return false;
  }
  tf2::Transform tf_target_from_world;
  tf2::fromMsg(*transform_msg, tf_target_from_world);

  // Re-express each object pose: target <- world <- object.
  for (auto & object : output_msg.objects) {
    tf2::Transform tf_world_from_object;
    tf2::fromMsg(object.kinematics.pose_with_covariance.pose, tf_world_from_object);
    const tf2::Transform tf_target_from_object = tf_target_from_world * tf_world_from_object;
    tf2::toMsg(tf_target_from_object, object.kinematics.pose_with_covariance.pose);
    // Note: Covariance is not transformed.
  }
  return true;
}

} // namespace

namespace occupancy_grid_based_validator
{
using Label = autoware_auto_perception_msgs::msg::ObjectClassification;
Expand Down Expand Up @@ -113,7 +62,7 @@ void OccupancyGridBasedValidator::onObjectsAndOccGrid(

// Transform to occ grid frame
autoware_auto_perception_msgs::msg::DetectedObjects transformed_objects;
if (!transformDetectedObjects(
if (!perception_utils::transformObjects(
*input_objects, input_occ_grid->header.frame_id, tf_buffer_, transformed_objects))
return;

Expand Down

0 comments on commit 9eb890a

Please sign in to comment.