feat: add lidar centerpoint package #71

Closed
wants to merge 9 commits into from
1 change: 1 addition & 0 deletions perception/lidar_centerpoint/.gitignore
@@ -0,0 +1 @@
/data/
174 changes: 174 additions & 0 deletions perception/lidar_centerpoint/CMakeLists.txt
@@ -0,0 +1,174 @@
cmake_minimum_required(VERSION 3.5)
project(lidar_centerpoint)

if(NOT CMAKE_CXX_STANDARD)
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
endif()
if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
add_compile_options(-Wall -Wextra -Wpedantic)
endif()

set(CUDA_VERBOSE OFF)

# set flags for CUDA availability
option(CUDA_AVAIL "CUDA available" OFF)
find_package(CUDA)
if(CUDA_FOUND)
find_library(CUBLAS_LIBRARIES cublas HINTS
${CUDA_TOOLKIT_ROOT_DIR}/lib64
${CUDA_TOOLKIT_ROOT_DIR}/lib
)
if(CUDA_VERBOSE)
message("CUDA is available!")
message("CUDA Libs: ${CUDA_LIBRARIES}")
message("CUDA Headers: ${CUDA_INCLUDE_DIRS}")
endif()
# Note: cublas_device was deprecated in CUDA version 9.2
# https://forums.developer.nvidia.com/t/where-can-i-find-libcublas-device-so-or-libcublas-device-a/67251/4
# In LibTorch, CUDA_cublas_device_LIBRARY is used.
unset(CUDA_cublas_device_LIBRARY CACHE)
set(CUDA_AVAIL ON)
else()
message("CUDA NOT FOUND")
set(CUDA_AVAIL OFF)
endif()

# set flags for TensorRT availability
option(TRT_AVAIL "TensorRT available" OFF)
# try to find the tensorRT modules
find_library(NVINFER nvinfer)
find_library(NVONNXPARSER nvonnxparser)
if(NVINFER AND NVONNXPARSER)
if(CUDA_VERBOSE)
message("TensorRT is available!")
message("NVINFER: ${NVINFER}")
message("NVONNXPARSER: ${NVONNXPARSER}")
endif()
set(TRT_AVAIL ON)
else()
message("TensorRT is NOT Available")
set(TRT_AVAIL OFF)
endif()

option(TORCH_AVAIL "Torch available" OFF)
if(CUDA_FOUND)
set(Torch_DIR /usr/local/libtorch/share/cmake/Torch)
find_package(Torch)
if(TORCH_FOUND)
if(CUDA_VERBOSE)
message(STATUS "TORCH_INCLUDE_DIRS: ${TORCH_INCLUDE_DIRS}")
message(STATUS "TORCH_LIBRARIES: ${TORCH_LIBRARIES}")
endif()
set(TORCH_AVAIL ON)
else()
message("Torch NOT FOUND")
set(TORCH_AVAIL OFF)
endif()
endif()

if(TRT_AVAIL AND CUDA_AVAIL AND TORCH_AVAIL)
# Download trained models
find_program(GDOWN_AVAIL "gdown")
if(NOT GDOWN_AVAIL)
message("gdown: command not found. External files could not be downloaded.")
endif()

set(DATA_PATH ${CMAKE_CURRENT_SOURCE_DIR}/data)
execute_process(COMMAND mkdir -p ${DATA_PATH})

function(download FILE_NAME GFILE_ID FILE_HASH)
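# Download FILE_NAME from Google Drive (file id GFILE_ID) into DATA_PATH, skipping the
# download when a local copy with a matching MD5 hash (FILE_HASH) already exists.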
# https://drive.google.com/file/d/GFILE_ID/view
message(STATUS "Checking and downloading ${FILE_NAME}")
set(FILE_PATH ${DATA_PATH}/${FILE_NAME})
if(EXISTS ${FILE_PATH})
file(MD5 ${FILE_PATH} EXISTING_FILE_HASH)
if(NOT ${FILE_HASH} STREQUAL ${EXISTING_FILE_HASH})
message(STATUS "... file hash has changed. Downloading now ...")
execute_process(COMMAND gdown --quiet https://drive.google.com//uc?id=${GFILE_ID} -O ${FILE_PATH})
endif()
else()
message(STATUS "... file doesn't exist. Downloading now ...")
execute_process(COMMAND gdown --quiet https://drive.google.com//uc?id=${GFILE_ID} -O ${FILE_PATH})
endif()
endfunction()

# default model
download(pts_voxel_encoder_default.onnx 1_8OCQmrPm_R4ZVh70QsS9HZo6uGrlbgz 01b860612e497591c4375d90dff61ef7)
download(pts_voxel_encoder_default.pt 1RZ7cuDnI-RBrDiWe-2vEs16mR_z0e9Uo 33136caa97e3bcef2cf3e04bbc93d1e4)
download(pts_backbone_neck_head_default.onnx 1UxDyt8T-TMJS7Ujx-1vbbqGRfDbMUZg2 e23a8ad4ea440f923e44dbe072b070da)
download(pts_backbone_neck_head_default.pt 1toAhmOriX8bwVI-ohuas9_2EBZnltoXh eb0df29b30acf9c1082ac4490af0bbc5)

# aip_x2 model
download(pts_voxel_encoder_aip_x2.onnx 1x-NAHQ3W0lbLmjJlrL6Nhvdq8yz6Ux0n 65eeb95c5e48ebfe6894146cdb48c160)
download(pts_voxel_encoder_aip_x2.pt 1jzKopAhXWjnEgo_v8rtYy0hQIayUE-oL 4db81ce8edc6571aa0afb1ae43ee72e9)
download(pts_backbone_neck_head_aip_x2.onnx 1l2fdIQcBWr3-6stVoNkudnL4OZaPqmNT a33c8910fd9c9c910b10904d3cd96717)
download(pts_backbone_neck_head_aip_x2.pt 18iOAlRsjvcWoUG9KiL1PlD7OY5mi9BSw 274fdf1580dd899e36c050c1366f1883)

find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

include_directories(
lib/include
${CUDA_INCLUDE_DIRS}
${TORCH_INCLUDE_DIRS}
)

### centerpoint ###
ament_auto_add_library(centerpoint SHARED
lib/src/pointcloud_densification.cpp
lib/src/voxel_generator.cpp
lib/src/centerpoint_trt.cpp
lib/src/tensorrt_wrapper.cpp
lib/src/network_trt.cpp
)

target_link_libraries(centerpoint
${NVINFER}
${NVONNXPARSER}
${NVINFER_PLUGIN}
${CUDA_LIBRARIES}
${CUBLAS_LIBRARIES}
${CUDA_curand_LIBRARY}
${TORCH_LIBRARIES}
)

## node ##
ament_auto_add_library(lidar_centerpoint_component SHARED
src/node.cpp
)

target_link_libraries(lidar_centerpoint_component
centerpoint
)

rclcpp_components_register_node(lidar_centerpoint_component
PLUGIN "centerpoint::LidarCenterPointNode"
EXECUTABLE lidar_centerpoint_node
)

if(BUILD_TESTING)
find_package(ament_lint_auto REQUIRED)
ament_lint_auto_find_test_dependencies()
endif()

ament_auto_package(
INSTALL_TO_SHARE
launch
data
config
)
else()
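# CUDA, TensorRT, or Torch is unavailable: skip building the node and install only the launch files.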
find_package(ament_cmake_auto REQUIRED)
ament_auto_find_build_dependencies()

if(BUILD_TESTING)
find_package(ament_lint_auto REQUIRED)
ament_lint_auto_find_test_dependencies()
endif()
ament_auto_package(
INSTALL_TO_SHARE
launch
)
endif()
63 changes: 63 additions & 0 deletions perception/lidar_centerpoint/README.md
@@ -0,0 +1,63 @@
# CenterPoint TensorRT

This is a 3D object detection implementation of CenterPoint supporting TensorRT inference.

The models are trained with [MMDetection3D](https://github.com/tier4/autoware_mmdetection3d).

> **Review comment (Contributor):** This repository is public. Could you comment out (or delete) this description?

`object.existence_probability` stores the classification confidence of the DNN, not an actual probability.
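
As a hedged illustration only (not part of this PR), a downstream consumer could therefore treat the field as a raw confidence score and apply its own threshold. The topic (`output/objects`) and message type come from this package's README and `node.hpp`; the callback name and the `0.5` cutoff below are hypothetical.

```cpp
#include <autoware_auto_perception_msgs/msg/detected_objects.hpp>

// Hypothetical consumer-side callback for the `output/objects` topic.
void onDetectedObjects(
  const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr msg)
{
  for (const auto & object : msg->objects) {
    // existence_probability holds the DNN classification confidence, not a calibrated probability.
    if (object.existence_probability < 0.5f) {  // hypothetical consumer-side cutoff
      continue;
    }
    // ... use object.kinematics, object.shape, object.classification ...
  }
}
```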

## Parameters

### Input Topics

| Name | Type | Description |
| ---------------- | ----------- | ------------------------------------ |
| input/pointcloud | PointCloud2 | Point Clouds (x, y, z and intensity) |

### Output Topics

| Name | Type | Description |
| ------------------------------ | ----------------------------- | ---------------------- |
| output/objects | DynamicObjectWithFeatureArray | 3D Bounding Box |
| debug/pointcloud_densification | PointCloud2 | multi-frame pointcloud |

### ROS Parameters

| Name | Type | Description | Default |
| ------------------------- | ------ | ----------------------------------------------------------- | ------- |
| score_threshold | float | detected objects with score less than threshold are ignored | `0.4` |
| densification_base_frame | string | the base frame id to fuse multi-frame pointcloud | `map` |
| densification_past_frames | int | the number of past frames to fuse with the current frame | `1` |
| use_encoder_trt | bool | use TensorRT VoxelFeatureEncoder | `false` |
| use_head_trt | bool | use TensorRT DetectionHead | `true` |
| trt_precision | string | TensorRT inference precision: `fp32` or `fp16` | `fp16` |
| encoder_onnx_path | string | path to VoxelFeatureEncoder ONNX file | |
| encoder_engine_path | string | path to VoxelFeatureEncoder TensorRT Engine file | |
| encoder_pt_path | string | path to VoxelFeatureEncoder TorchScript file | |
| head_onnx_path | string | path to DetectionHead ONNX file | |
| head_engine_path | string | path to DetectionHead TensorRT Engine file | |
| head_pt_path | string | path to DetectionHead TorchScript file | |
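
For reference, here is a minimal sketch of how these parameters could be declared in the node constructor with the defaults listed above. This is an assumption for illustration, not necessarily the exact code in `src/node.cpp`; the empty-string defaults for the path parameters are likewise assumed.

```cpp
#include <lidar_centerpoint/node.hpp>

namespace centerpoint
{
LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_options)
: Node("lidar_centerpoint", node_options)
{
  // Names and defaults follow the parameter table above.
  score_threshold_ = static_cast<float>(declare_parameter("score_threshold", 0.4));
  densification_base_frame_ = declare_parameter("densification_base_frame", std::string("map"));
  densification_past_frames_ = declare_parameter("densification_past_frames", 1);
  use_encoder_trt_ = declare_parameter("use_encoder_trt", false);
  use_head_trt_ = declare_parameter("use_head_trt", true);
  trt_precision_ = declare_parameter("trt_precision", std::string("fp16"));
  encoder_onnx_path_ = declare_parameter("encoder_onnx_path", std::string(""));
  encoder_engine_path_ = declare_parameter("encoder_engine_path", std::string(""));
  encoder_pt_path_ = declare_parameter("encoder_pt_path", std::string(""));
  head_onnx_path_ = declare_parameter("head_onnx_path", std::string(""));
  head_engine_path_ = declare_parameter("head_engine_path", std::string(""));
  head_pt_path_ = declare_parameter("head_pt_path", std::string(""));
}
}  // namespace centerpoint
```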

## For Developers

If you encounter an error like `'GOMP_4.5' not found`, replace the OpenMP library in libtorch:

```bash
sudo apt install libgomp1 -y
rm /path/to/libtorch/lib/libgomp-75eea7e8.so.1
ln -s /usr/lib/x86_64-linux-gnu/libgomp.so.1 /path/to/libtorch/lib/libgomp-75eea7e8.so.1
```

## Reference

Yin, Tianwei, Xingyi Zhou, and Philipp Krähenbühl. "Center-based 3d object detection and tracking." arXiv preprint arXiv:2006.11275 (2020).

## Reference Repositories

- <https://github.com/tianweiy/CenterPoint>
- <https://github.com/open-mmlab/OpenPCDet>
- <https://github.com/poodarchu/Det3D>
- <https://github.com/xingyizhou/CenterNet>
- <https://github.com/lzccccc/SMOKE>
- <https://github.com/yukkysaito/autoware_perception>
- <https://github.com/pytorch/pytorch>
3 changes: 3 additions & 0 deletions perception/lidar_centerpoint/config/aip_x2.param.yaml
@@ -0,0 +1,3 @@
/**:
ros__parameters:
class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
4 changes: 4 additions & 0 deletions perception/lidar_centerpoint/config/default.param.yaml
@@ -0,0 +1,4 @@
/**:
ros__parameters:
class_names: ["CAR", "PEDESTRIAN", "BICYCLE"]
rename_car_to_truck_and_bus: true
75 changes: 75 additions & 0 deletions perception/lidar_centerpoint/include/lidar_centerpoint/node.hpp
@@ -0,0 +1,75 @@
// Copyright 2021 Tier IV, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef LIDAR_CENTERPOINT__NODE_HPP_
#define LIDAR_CENTERPOINT__NODE_HPP_

#include <centerpoint_trt.hpp>
#include <config.hpp>
#include <pointcloud_densification.hpp>
#include <rclcpp/rclcpp.hpp>

#include <autoware_auto_perception_msgs/msg/detected_object_kinematics.hpp>
#include <autoware_auto_perception_msgs/msg/detected_objects.hpp>
#include <autoware_auto_perception_msgs/msg/object_classification.hpp>
#include <autoware_auto_perception_msgs/msg/shape.hpp>
#include <sensor_msgs/msg/point_cloud2.hpp>

#include <memory>
#include <string>
#include <vector>

namespace centerpoint
{
using Label = autoware_auto_perception_msgs::msg::ObjectClassification;

class LidarCenterPointNode : public rclcpp::Node
{
public:
explicit LidarCenterPointNode(const rclcpp::NodeOptions & node_options);

private:
void pointCloudCallback(const sensor_msgs::msg::PointCloud2::ConstSharedPtr input_pointcloud_msg);

static uint8_t getSemanticType(const std::string & class_name);
static bool isCarLikeVehicleLabel(const uint8_t label);

rclcpp::Subscription<sensor_msgs::msg::PointCloud2>::SharedPtr pointcloud_sub_;
rclcpp::Publisher<autoware_auto_perception_msgs::msg::DetectedObjects>::SharedPtr objects_pub_;
rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr pointcloud_pub_;

float score_threshold_{0.0};
std::string densification_base_frame_;
int densification_past_frames_{0};
bool use_encoder_trt_{false};
bool use_head_trt_{false};
std::string trt_precision_;

std::string encoder_onnx_path_;
std::string encoder_engine_path_;
std::string encoder_pt_path_;
std::string head_onnx_path_;
std::string head_engine_path_;
std::string head_pt_path_;

std::vector<std::string> class_names_;
bool rename_car_to_truck_and_bus_{false};

std::unique_ptr<PointCloudDensification> densification_ptr_{nullptr};
std::unique_ptr<CenterPointTRT> detector_ptr_{nullptr};
};

} // namespace centerpoint

#endif // LIDAR_CENTERPOINT__NODE_HPP_
23 changes: 23 additions & 0 deletions perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
@@ -0,0 +1,23 @@
<?xml version="1.0"?>
<launch>
<arg name="input/pointcloud" default="/sensing/lidar/top/rectified/pointcloud"/>
<arg name="output/objects" default="objects"/>
<arg name="model_name" default="default" description="options: `default` or `aip_x2`"/>
<arg name="model_path" default="$(find-pkg-share lidar_centerpoint)/data"/>
<arg name="model_param_path" default="$(find-pkg-share lidar_centerpoint)/config/$(var model_name).param.yaml"/>

<node pkg="lidar_centerpoint" exec="lidar_centerpoint_node"
name="lidar_centerpoint" output="screen">
<remap from="~/input/pointcloud" to="$(var input/pointcloud)"/>
<remap from="~/output/objects" to="$(var output/objects)"/>
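<!-- use_encoder_trt is left at its default (false), so the voxel encoder runs the TorchScript model; the detection head runs via TensorRT (use_head_trt=true) in fp16. -->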
<param name="score_threshold" value="0.45"/>
<param name="densification_base_frame" value="map"/>
<param name="densification_past_frames" value="1"/>
<param name="use_head_trt" value="true"/>
<param name="trt_precision" value="fp16"/>
<param name="encoder_pt_path" value="$(var model_path)/pts_voxel_encoder_$(var model_name).pt"/>
<param name="head_onnx_path" value="$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"/>
<param name="head_engine_path" value="$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"/>
<param from="$(var model_param_path)"/>
</node>
</launch>