[Feature] Evaluating tracking accuracy based on KITTI data #1671

Merged · 16 commits · Nov 1, 2018
@@ -83,6 +83,14 @@ class ImmUkfPda
// whether to publish debug ROS markers
bool is_debug_;

// whether to benchmark the tracking result
bool is_benchmark_;
int frame_count_;
std::string kitti_data_dir_;

// for benchmark
std::string result_file_path_;

// parameter to prevent the ukf from exploding
double prevent_explosion_thres_;

@@ -150,6 +158,8 @@ class ImmUkfPda

void pubDebugRosMarker(const autoware_msgs::DetectedObjectArray& input);

void dumpResultText(autoware_msgs::DetectedObjectArray& detected_objects);

void tracker(const autoware_msgs::DetectedObjectArray& transformed_input,
jsk_recognition_msgs::BoundingBoxArray& jskbboxes_output,
autoware_msgs::DetectedObjectArray& detected_objects_output);
@@ -37,7 +37,8 @@
ImmUkfPda::ImmUkfPda()
: target_id_(0)
, // assign a unique ukf_id_ to each tracking target
init_(false)
init_(false),
frame_count_(0)
{
ros::NodeHandle private_nh_("~");
private_nh_.param<std::string>("pointcloud_frame", pointcloud_frame_, "velodyne");
@@ -51,6 +52,15 @@ ImmUkfPda::ImmUkfPda()
private_nh_.param<double>("prevent_explosion_thres", prevent_explosion_thres_, 1000);
private_nh_.param<bool>("use_sukf", use_sukf_, false);
private_nh_.param<bool>("is_debug", is_debug_, false);

// rosparam for benchmark
private_nh_.param<bool>("is_benchmark", is_benchmark_, false);
private_nh_.param<std::string>("kitti_data_dir", kitti_data_dir_, "/home/hoge/kitti/2011_09_26/2011_09_26_drive_0005_sync/");
if(is_benchmark_)
{
result_file_path_ = kitti_data_dir_ + "benchmark_results.txt";
remove(result_file_path_.c_str());
}
}

void ImmUkfPda::run()
@@ -80,6 +90,11 @@ void ImmUkfPda::callback(const autoware_msgs::DetectedObjectArray& input)
transformPoseToLocal(jskbboxes_output, detected_objects_output);
pub_jskbbox_array_.publish(jskbboxes_output);
pub_object_array_.publish(detected_objects_output);

if(is_benchmark_)
{
dumpResultText(detected_objects_output);
}
}

void ImmUkfPda::relayJskbbox(const autoware_msgs::DetectedObjectArray& input,
@@ -752,6 +767,42 @@ void ImmUkfPda::pubDebugRosMarker(const autoware_msgs::DetectedObjectArray& input)
pub_texts_array_.publish(texts_markers);
}

void ImmUkfPda::dumpResultText(autoware_msgs::DetectedObjectArray& detected_objects)
{
std::ofstream outputfile(result_file_path_, std::ofstream::out | std::ofstream::app);
for(size_t i = 0; i < detected_objects.objects.size(); i++)
{
tf::Quaternion q(detected_objects.objects[i].pose.orientation.x, detected_objects.objects[i].pose.orientation.y,
detected_objects.objects[i].pose.orientation.z, detected_objects.objects[i].pose.orientation.w);
double roll, pitch, yaw;
tf::Matrix3x3(q).getRPY(roll, pitch, yaw);
@yn-mrse commented on Nov 1, 2018:

You can abbreviate the code by using tf::getYaw in "tf/transform_datatypes.h".
http://docs.ros.org/melodic/api/tf/html/c++/transform__datatypes_8h.html

Author replied:

I fixed this by using tf::getYaw. I also added a description of the benchmark to README.md.
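
For reference, a minimal sketch of the suggested simplification, assuming the autoware_msgs::DetectedObject message already used in this node (the helper name extractYaw is illustrative, not part of this PR):

#include <tf/transform_datatypes.h>        // provides tf::getYaw
#include <autoware_msgs/DetectedObject.h>

// tf::getYaw accepts a geometry_msgs::Quaternion directly, so the manual
// tf::Quaternion / tf::Matrix3x3::getRPY extraction is not needed when
// only yaw is of interest.
static double extractYaw(const autoware_msgs::DetectedObject& object)
{
  return tf::getYaw(object.pose.orientation);
}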


// KITTI tracking benchmark data format:
// (frame_number, tracked_id, object_type, truncation, occlusion, observation_angle, x1, y1, x2, y2, h, w, l, cx, cy, cz, yaw)
// x1, y1, x2, y2 are the 2D bounding box coordinates.
// h, w, l are the height, width and length respectively.
// cx, cy, cz are the object centroid.

// The tracking benchmark is based on frame_number, tracked_id,
// the bounding box dimensions and the object pose (centroid and orientation) from the bird's-eye view.
outputfile << std::to_string(frame_count_) <<" "
<< std::to_string(detected_objects.objects[i].id) <<" "
<< "Unknown" <<" "
<< "-1" <<" "
<< "-1" <<" "
<< "-1" <<" "
<< "-1 -1 -1 -1" <<" "
<< std::to_string(detected_objects.objects[i].dimensions.x) <<" "
<< std::to_string(detected_objects.objects[i].dimensions.y) <<" "
<< "-1" <<" "
<< std::to_string(detected_objects.objects[i].pose.position.x)<<" "
<< std::to_string(detected_objects.objects[i].pose.position.y)<<" "
<< "-1" <<" "
<< std::to_string(yaw) <<"\n";
}
frame_count_++;
}
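
Not part of this diff, but as a minimal sketch of how a line of the resulting benchmark_results.txt could be read back for evaluation, assuming the whitespace-separated layout written by dumpResultText() above (the struct and function names are illustrative only):

#include <sstream>
#include <string>

// Fields mirror the KITTI-style line written above; fields that
// dumpResultText() always writes as "Unknown" or -1 are read into dummies.
struct BenchmarkRow
{
  int frame_number;
  int tracked_id;
  double h, w;    // dimensions.x and dimensions.y as written above
  double cx, cy;  // pose.position.x and pose.position.y
  double yaw;
};

static bool parseBenchmarkLine(const std::string& line, BenchmarkRow& row)
{
  std::istringstream iss(line);
  std::string object_type;              // always "Unknown" in this output
  double truncation, occlusion, alpha;  // written as -1
  double x1, y1, x2, y2, l, cz;         // written as -1
  return static_cast<bool>(iss >> row.frame_number >> row.tracked_id >> object_type
                               >> truncation >> occlusion >> alpha
                               >> x1 >> y1 >> x2 >> y2
                               >> row.h >> row.w >> l
                               >> row.cx >> row.cy >> cz >> row.yaw);
}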

void ImmUkfPda::tracker(const autoware_msgs::DetectedObjectArray& input,
jsk_recognition_msgs::BoundingBoxArray& jskbboxes_output,
autoware_msgs::DetectedObjectArray& detected_objects_output)