diff --git a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/README.md b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/README.md
index d12da7d9e57..e4f1d3b5b0f 100644
--- a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/README.md
+++ b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/README.md
@@ -87,6 +87,13 @@ Node: visualize_detected_objects
 |`/detected_objects/target_id`|`visualization_msgs::Marker`|Visualize targets' id.|
+
 ### Video
 [![IMM UKF PDA lidar_tracker Autoware](https://img.youtube.com/vi/tKgDVsIfH-s/0.jpg)](https://youtu.be/tKgDVsIfH-s)
+
+
+### Benchmark
+Note that the benchmark scripts are maintained in a separate repository.
+You can tune the tracker's parameters by benchmarking against the KITTI dataset.
+The repository is [here](https://github.com/cirpue49/kitti_tracking_benchmark).
diff --git a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/include/imm_ukf_pda.h b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/include/imm_ukf_pda.h
index 344e818abfd..3e3ea393f35 100644
--- a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/include/imm_ukf_pda.h
+++ b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/include/imm_ukf_pda.h
@@ -83,6 +83,14 @@ class ImmUkfPda
   // whether if publish debug ros markers
   bool is_debug_;
 
+  // whether to benchmark the tracking result
+  bool is_benchmark_;
+  int frame_count_;
+  std::string kitti_data_dir_;
+
+  // output file for the benchmark results
+  std::string result_file_path_;
+
   // prevent explode param for ukf
   double prevent_explosion_thres_;
@@ -150,6 +158,8 @@ class ImmUkfPda
   void pubDebugRosMarker(const autoware_msgs::DetectedObjectArray& input);
 
+  void dumpResultText(autoware_msgs::DetectedObjectArray& detected_objects);
+
   void tracker(const autoware_msgs::DetectedObjectArray& transformed_input,
                jsk_recognition_msgs::BoundingBoxArray& jskbboxes_output,
                autoware_msgs::DetectedObjectArray& detected_objects_output);
diff --git a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/nodes/imm_ukf_pda/imm_ukf_pda.cpp b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/nodes/imm_ukf_pda/imm_ukf_pda.cpp
index a5c7aaae14f..8720162d2f0 100644
--- a/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/nodes/imm_ukf_pda/imm_ukf_pda.cpp
+++ b/ros/src/computing/perception/detection/lidar_tracker/packages/lidar_imm_ukf_pda_track/nodes/imm_ukf_pda/imm_ukf_pda.cpp
@@ -37,7 +37,8 @@ ImmUkfPda::ImmUkfPda()
   : target_id_(0)
   ,  // assign unique ukf_id_ to each tracking targets
-  init_(false)
+  init_(false),
+  frame_count_(0)
 {
   ros::NodeHandle private_nh_("~");
   private_nh_.param("pointcloud_frame", pointcloud_frame_, "velodyne");
@@ -51,6 +52,15 @@ ImmUkfPda::ImmUkfPda()
   private_nh_.param("prevent_explosion_thres", prevent_explosion_thres_, 1000);
   private_nh_.param("use_sukf", use_sukf_, false);
   private_nh_.param("is_debug", is_debug_, false);
+
+  // rosparams for the KITTI benchmark
+  private_nh_.param("is_benchmark", is_benchmark_, false);
+  private_nh_.param<std::string>("kitti_data_dir", kitti_data_dir_, "/home/hoge/kitti/2011_09_26/2011_09_26_drive_0005_sync/");
+  if(is_benchmark_)
+  {
+    result_file_path_ = kitti_data_dir_ + "benchmark_results.txt";
+    std::remove(result_file_path_.c_str());
+  }
 }
 
 void ImmUkfPda::run()
 {
@@ -80,6 +90,11 @@ void ImmUkfPda::callback(const autoware_msgs::DetectedObjectArray& input)
   transformPoseToLocal(jskbboxes_output, detected_objects_output);
   pub_jskbbox_array_.publish(jskbboxes_output);
   pub_object_array_.publish(detected_objects_output);
+
+  if(is_benchmark_)
+  {
+    dumpResultText(detected_objects_output);
+  }
 }
 
 void ImmUkfPda::relayJskbbox(const autoware_msgs::DetectedObjectArray& input,
@@ -752,6 +767,39 @@ void ImmUkfPda::pubDebugRosMarker(const autoware_msgs::DetectedObjectArray& inpu
   pub_texts_array_.publish(texts_markers);
 }
 
+void ImmUkfPda::dumpResultText(autoware_msgs::DetectedObjectArray& detected_objects)
+{
+  std::ofstream outputfile(result_file_path_, std::ofstream::out | std::ofstream::app);
+  for(size_t i = 0; i < detected_objects.objects.size(); i++)
+  {
+    double yaw = tf::getYaw(detected_objects.objects[i].pose.orientation);
+
+    // KITTI tracking benchmark data format:
+    // (frame_number, tracked_id, object_type, truncation, occlusion, observation_angle, x1, y1, x2, y2, h, w, l, cx, cy, cz, yaw)
+    // x1, y1, x2, y2 are the 2D bounding box.
+    // h, w, l are height, width and length, respectively.
+    // cx, cy, cz are the object centroid.
+
+    // The tracking benchmark only evaluates frame_number, tracked_id, the bounding box
+    // dimensions and the object pose (centroid and orientation) in the bird's-eye view; unused fields are written as -1.
+    outputfile << std::to_string(frame_count_) << " "
+               << std::to_string(detected_objects.objects[i].id) << " "
+               << "Unknown" << " "
+               << "-1" << " "
+               << "-1" << " "
+               << "-1" << " "
+               << "-1 -1 -1 -1" << " "
+               << std::to_string(detected_objects.objects[i].dimensions.x) << " "
+               << std::to_string(detected_objects.objects[i].dimensions.y) << " "
+               << "-1" << " "
+               << std::to_string(detected_objects.objects[i].pose.position.x) << " "
+               << std::to_string(detected_objects.objects[i].pose.position.y) << " "
+               << "-1" << " "
+               << std::to_string(yaw) << "\n";
+  }
+  frame_count_++;
+}
+
 void ImmUkfPda::tracker(const autoware_msgs::DetectedObjectArray& input,
                         jsk_recognition_msgs::BoundingBoxArray& jskbboxes_output,
                         autoware_msgs::DetectedObjectArray& detected_objects_output)
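For reference, each line that `dumpResultText()` appends to `benchmark_results.txt` carries the 17 whitespace-separated KITTI tracking fields listed in the comment above; only the frame number, tracked id, the bird's-eye-view box dimensions and the object pose hold real values, while the rest are written as `Unknown` or `-1`. The following is a minimal sketch of reading such a line back. It is not part of this patch and not the parser used by the kitti_tracking_benchmark repository, and the relative file path is an assumption for illustration only.

```cpp
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

int main()
{
  // Path assumed for illustration; dumpResultText() writes the file under kitti_data_dir_.
  std::ifstream infile("benchmark_results.txt");
  std::string line;
  while (std::getline(infile, line))
  {
    std::istringstream iss(line);
    int frame = 0, id = 0;
    std::string type;                     // always "Unknown" for this tracker
    double truncation, occlusion, alpha;  // written as -1
    double x1, y1, x2, y2;                // 2D box, written as -1
    double h, w, l;                       // here: dimensions.x, dimensions.y, -1
    double cx, cy, cz, yaw;               // cz is written as -1
    if (iss >> frame >> id >> type >> truncation >> occlusion >> alpha
            >> x1 >> y1 >> x2 >> y2 >> h >> w >> l >> cx >> cy >> cz >> yaw)
    {
      std::cout << "frame " << frame << " id " << id
                << " center (" << cx << ", " << cy << ") yaw " << yaw << "\n";
    }
  }
  return 0;
}
```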