diff --git a/sensing/autoware_pointcloud_preprocessor/CMakeLists.txt b/sensing/autoware_pointcloud_preprocessor/CMakeLists.txt index 56dab521d7dff..19c698b11bc5f 100644 --- a/sensing/autoware_pointcloud_preprocessor/CMakeLists.txt +++ b/sensing/autoware_pointcloud_preprocessor/CMakeLists.txt @@ -64,7 +64,10 @@ ament_target_dependencies(faster_voxel_grid_downsample_filter ) ament_auto_add_library(pointcloud_preprocessor_filter SHARED - src/concatenate_data/concatenate_and_time_sync_nodelet.cpp + src/concatenate_data/concatenate_and_time_sync_node.cpp + src/concatenate_data/combine_cloud_handler.cpp + src/concatenate_data/cloud_collector.cpp + src/concatenate_data/collector_matching_strategy.cpp src/concatenate_data/concatenate_pointclouds.cpp src/crop_box_filter/crop_box_filter_node.cpp src/time_synchronizer/time_synchronizer_node.cpp @@ -111,7 +114,7 @@ rclcpp_components_register_node(pointcloud_preprocessor_filter # ========== Concatenate and Sync data ========== rclcpp_components_register_node(pointcloud_preprocessor_filter PLUGIN "autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent" - EXECUTABLE concatenate_data_node) + EXECUTABLE concatenate_and_time_sync_node) # ========== CropBox ========== rclcpp_components_register_node(pointcloud_preprocessor_filter @@ -243,8 +246,17 @@ if(BUILD_TESTING) test/test_distortion_corrector_node.cpp ) + ament_add_gtest(test_concatenate_node_unit + test/test_concatenate_node_unit.cpp + ) + target_link_libraries(test_utilities pointcloud_preprocessor_filter) target_link_libraries(test_distortion_corrector_node pointcloud_preprocessor_filter) + target_link_libraries(test_concatenate_node_unit pointcloud_preprocessor_filter) + add_ros_test( + test/test_concatenate_node_component.py + TIMEOUT "50" + ) endif() diff --git a/sensing/autoware_pointcloud_preprocessor/config/concatenate_and_time_sync_node.param.yaml b/sensing/autoware_pointcloud_preprocessor/config/concatenate_and_time_sync_node.param.yaml new file mode 100644 index 0000000000000..56fe6643b9973 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/config/concatenate_and_time_sync_node.param.yaml @@ -0,0 +1,21 @@ +/**: + ros__parameters: + debug_mode: false + has_static_tf_only: false + rosbag_length: 10.0 + maximum_queue_size: 5 + timeout_sec: 0.2 + is_motion_compensated: true + publish_synchronized_pointcloud: true + keep_input_frame_in_synchronized_pointcloud: true + publish_previous_but_late_pointcloud: false + synchronized_pointcloud_postfix: pointcloud + input_twist_topic_type: twist + input_topics: [ + "/sensing/lidar/right/pointcloud_before_sync", + "/sensing/lidar/top/pointcloud_before_sync", + "/sensing/lidar/left/pointcloud_before_sync", + ] + output_frame: base_link + matching_strategy: + type: naive diff --git a/sensing/autoware_pointcloud_preprocessor/docs/concatenate-data.md b/sensing/autoware_pointcloud_preprocessor/docs/concatenate-data.md index 08f7b92f88975..f640608cf91ec 100644 --- a/sensing/autoware_pointcloud_preprocessor/docs/concatenate-data.md +++ b/sensing/autoware_pointcloud_preprocessor/docs/concatenate-data.md @@ -1,75 +1,224 @@ -# concatenate_data +# concatenate_and_time_synchronize_node ## Purpose -Many self-driving cars combine multiple LiDARs to expand the sensing range. Therefore, a function to combine a plurality of point clouds is required. +The `concatenate_and_time_synchronize_node` is a node designed to combine and synchronize multiple point clouds into a single, unified point cloud. 
By integrating data from multiple LiDARs, this node significantly enhances the sensing range and coverage of autonomous vehicles, enabling more accurate perception of the surrounding environment. Synchronization ensures that point clouds are aligned temporally, reducing errors caused by mismatched timestamps. -To combine multiple sensor data with a similar timestamp, the [message_filters](https://github.com/ros2/message_filters) is often used in the ROS-based system, but this requires the assumption that all inputs can be received. Since safety must be strongly considered in autonomous driving, the point clouds concatenate node must be designed so that even if one sensor fails, the remaining sensor information can be output. +For example, consider a vehicle equipped with three LiDAR sensors mounted on the left, right, and top positions. Each LiDAR captures data from its respective field of view, as shown below: -## Inner-workings / Algorithms +| Left | Top | Right | +| :-----------------------------------------------: | :---------------------------------------------: | :-------------------------------------------------: | +| ![Concatenate Left](./image/concatenate_left.png) | ![Concatenate Top](./image/concatenate_top.png) | ![Concatenate Right](./image/concatenate_right.png) | -The figure below represents the reception time of each sensor data and how it is combined in the case. +After processing the data through the `concatenate_and_time_synchronize_node`, the outputs from all LiDARs are combined into a single comprehensive point cloud that provides a complete view of the environment: -![concatenate_data_timing_chart](./image/concatenate_data.drawio.svg) +![Full Scene View](./image/concatenate_all.png) + +This resulting point cloud allows autonomous systems to detect obstacles, map the environment, and navigate more effectively, leveraging the complementary fields of view from multiple LiDAR sensors. + +## Inner Workings / Algorithms + +![concatenate_algorithm](./image/concatenate_algorithm.drawio.svg) + +### Step 1: Match and Create Collector + +When a point cloud arrives, its timestamp is checked, and an offset is subtracted to get the reference timestamp. The node then checks if there is an existing collector with the same reference timestamp. If such a collector exists, the point cloud is added to it. If no such collector exists, a new collector is created with the reference timestamp. + +### Step 2: Trigger the Timer + +Once a collector is created, a timer for that collector starts counting down (this value is defined by `timeout_sec`). The collector begins to concatenate the point clouds either when all point clouds defined in `input_topics` have been collected or when the timer counts down to zero. + +### Step 3: Concatenate the Point Clouds + +The concatenation process involves merging multiple point clouds into a single, concatenated point cloud. The timestamp of the concatenated point cloud will be the earliest timestamp from the input point clouds. By setting the parameter `is_motion_compensated` to `true`, the node will consider the timestamps of the input point clouds and utilize the `twist` information from `geometry_msgs::msg::TwistWithCovarianceStamped` to compensate for motion, aligning the point cloud to the selected (earliest) timestamp. + +### Step 4: Publish the Point Cloud + +After concatenation, the concatenated point cloud is published, and the collector is deleted to free up resources. 
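+The Step 1 matching can be sketched in a few lines. This is a hypothetical, simplified illustration only — the actual logic lives in `collector_matching_strategy.cpp` and also combines the per-topic noise windows, which is omitted here. Assuming the `advanced` strategy, the reference timestamp is the cloud stamp minus its per-topic offset, and a cloud joins a collector whose reference timestamp lies within the noise window:
+
+```cpp
+#include <cmath>
+#include <list>
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+
+struct Collector
+{
+  double reference_timestamp{0.0};  // stamp of the first cloud minus its per-topic offset
+  double noise_window{0.0};         // taken from lidar_timestamp_noise_window
+};
+
+// Returns the collector whose reference timestamp matches the incoming cloud, if any.
+std::optional<std::shared_ptr<Collector>> match_cloud_to_collector(
+  const std::list<std::shared_ptr<Collector>> & collectors, const std::string & topic,
+  double cloud_stamp, const std::unordered_map<std::string, double> & topic_to_offset)
+{
+  const double reference = cloud_stamp - topic_to_offset.at(topic);
+  for (const auto & collector : collectors) {
+    if (std::fabs(reference - collector->reference_timestamp) < collector->noise_window) {
+      return collector;  // Step 1: add the cloud to this collector
+    }
+  }
+  return std::nullopt;  // no match: the caller creates a new collector and starts its timer
+}
+```
+
+If no collector matches, a new collector is created with this reference timestamp and its `timeout_sec` timer is started (Step 2); the `naive` strategy omits the offset and noise-window bookkeeping entirely.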
## Inputs / Outputs ### Input -| Name | Type | Description | -| --------------- | ------------------------------------------------ | ----------------------------------------------------------------------------- | -| `~/input/twist` | `geometry_msgs::msg::TwistWithCovarianceStamped` | The vehicle odometry is used to interpolate the timestamp of each sensor data | +| Name | Type | Description | +| --------------- | ------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `~/input/twist` | `geometry_msgs::msg::TwistWithCovarianceStamped` | Twist information adjusts the point cloud scans based on vehicle motion, allowing LiDARs with different timestamps to be synchronized for concatenation. | +| `~/input/odom` | `nav_msgs::msg::Odometry` | Vehicle odometry adjusts the point cloud scans based on vehicle motion, allowing LiDARs with different timestamps to be synchronized for concatenation. | + +By setting the `input_twist_topic_type` parameter to `twist` or `odom`, the subscriber will subscribe to either `~/input/twist` or `~/input/odom`. If the user doesn't want to use the twist information or vehicle odometry to compensate for motion, set `is_motion_compensated` to `false`. ### Output | Name | Type | Description | | ----------------- | ------------------------------- | ------------------------- | -| `~/output/points` | `sensor_msgs::msg::Pointcloud2` | concatenated point clouds | +| `~/output/points` | `sensor_msgs::msg::Pointcloud2` | Concatenated point clouds | + +### Core Parameters -## Parameters +{{ json_to_markdown("sensing/autoware_pointcloud_preprocessor/schema/concatenate_and_time_sync_node.schema.json") }} -| Name | Type | Default Value | Description | -| -------------------- | ---------------- | ------------- | ------------------------------------------------------------------- | -| `input/points` | vector of string | [] | input topic names that type must be `sensor_msgs::msg::Pointcloud2` | -| `input_frame` | string | "" | input frame id | -| `output_frame` | string | "" | output frame id | -| `has_static_tf_only` | bool | false | flag to listen TF only once | -| `max_queue_size` | int | 5 | max queue size of input/output topics | +### Parameter Settings -### Core Parameters +If you didn't synchronize your LiDAR sensors, set the `type` parameter of `matching_strategy` to `naive` to concatenate the point clouds directly. However, if your LiDAR sensors are synchronized, set type to `advanced` and adjust the `lidar_timestamp_offsets` and `lidar_timestamp_noise_window` parameters accordingly. + +The three parameters, `timeout_sec`, `lidar_timestamp_offsets`, and `lidar_timestamp_noise_window`, are essential for efficiently collecting point clouds in the same collector and managing edge cases (point cloud drops or delays) effectively. + +#### timeout_sec + +When network issues occur or when point clouds experience delays in the previous processing pipeline, some point clouds may be delayed or dropped. To address this, the `timeout_sec` parameter is used. Once the timer is created, it will start counting down from `timeout_sec`. If the timer reaches zero, the collector will not wait for delayed or dropped point clouds but will concatenate the remaining point clouds in the collector directly. The figure below demonstrates how `timeout_sec` works with `concatenate_and_time_sync_node` when `timeout_sec` is set to `0.12` (120 ms). 
+ +![concatenate_edge_case](./image/concatenate_edge_case.drawio.svg) + +#### lidar_timestamp_offsets + +Since different vehicles have varied designs for LiDAR scanning, the timestamps of each LiDAR may differ. Users need to know the offsets between each LiDAR and set the values in `lidar_timestamp_offsets`. + +To monitor the timestamps of each LiDAR, run the following command: + +```bash +ros2 topic echo "pointcloud_topic" --field header +``` + +The timestamps should increase steadily by approximately 100 ms, as per the Autoware default. You should see output like this: + +```bash +nanosec: 156260951 +nanosec: 257009560 +nanosec: 355444581 +``` + +This pattern indicates a LiDAR timestamp of 0.05. + +If there are three LiDARs (left, right, top), and the timestamps for the left, right, and top point clouds are `0.01`, `0.05`, and `0.09` seconds respectively, the parameters should be set as [0.0, 0.04, 0.08]. This reflects the timestamp differences between the current point cloud and the point cloud with the earliest timestamp. Note that the order of the `lidar_timestamp_offsets` corresponds to the order of the `input_topics`. + +The figure below demonstrates how `lidar_timestamp_offsets` works with `concatenate_and_time_sync_node`. + +![ideal_timestamp_offset](./image/ideal_timestamp_offset.drawio.svg) + +#### lidar_timestamp_noise_window + +Additionally, due to the mechanical design of LiDARs, there may be some jitter in the timestamps of each scan, as shown in the image below. For example, if the scan frequency is set to 10 Hz (scanning every 100 ms), the timestamps between each scan might not be exactly 100 ms apart. To handle this noise, the `lidar_timestamp_noise_window` parameter is provided. + +Users can use [this tool](https://github.com/tier4/timestamp_analyzer) to visualize the noise between each scan. + +![jitter](./image/jitter.png) + +From the example above, the noise ranges from 0 to 8 ms, so the user should set `lidar_timestamp_noise_window` to `0.008`. -| Name | Type | Default Value | Description | -| --------------------------------- | ---------------- | ------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `timeout_sec` | double | 0.1 | tolerance of time to publish next pointcloud [s]
When this time limit is exceeded, the filter concatenates and publishes pointcloud, even if not all the point clouds are subscribed. | -| `input_offset` | vector of double | [] | This parameter can control waiting time for each input sensor pointcloud [s]. You must to set the same length of offsets with input pointclouds numbers.
For its tuning, please see [actual usage page](#how-to-tuning-timeout_sec-and-input_offset). | -| `publish_synchronized_pointcloud` | bool | false | If true, publish the time synchronized pointclouds. All input pointclouds are transformed and then re-published as message named `_synchronized`. | -| `input_twist_topic_type` | std::string | twist | Topic type for twist. Currently support `twist` or `odom`. | +The figure below demonstrates how `lidar_timestamp_noise_window` works with the `concatenate_and_time_sync_node`. If the green `X` is within the range of the red triangles, it indicates that the point cloud matches the reference timestamp of the collector. -## Actual Usage +![noise_timestamp_offset](./image/noise_timestamp_offset.drawio.svg) -For the example of actual usage of this node, please refer to the [preprocessor.launch.py](../launch/preprocessor.launch.py) file. +## Launch + +```bash +# The launch file will read the parameters from the concatenate_and_time_sync_node.param.yaml +ros2 launch autoware_pointcloud_preprocessor concatenate_and_time_sync_node.launch.xml +``` -### How to tuning timeout_sec and input_offset +## Test -The values in `timeout_sec` and `input_offset` are used in the timer_callback to control concatenation timings. +```bash +# build autoware_pointcloud_preprocessor +colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release --packages-up-to autoware_pointcloud_preprocessor -- Assumptions - - when the timer runs out, we concatenate the pointclouds in the buffer - - when the first pointcloud comes to buffer, we reset the timer to `timeout_sec` - - when the second and later pointclouds comes to buffer, we reset the timer to `timeout_sec` - `input_offset` - - we assume all lidar has same frequency +# test autoware_pointcloud_preprocessor +colcon test --packages-select autoware_pointcloud_preprocessor --event-handlers console_cohesion+ +``` -| Name | Description | How to tune | -| -------------- | ---------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `timeout_sec` | timeout sec for default timer | To avoid mis-concatenation, at least this value must be shorter than sampling time. | -| `input_offset` | timeout extension when a pointcloud comes to buffer. | The amount of waiting time will be `timeout_sec` - `input_offset`. So, you will need to set larger value for the last-coming pointcloud and smaller for fore-coming. | +## Debug and Diagnostics -### Node separation options for future +To verify whether the node has successfully concatenated the point clouds, the user can examine rqt or the `/diagnostics` topic using the following command: -Since the pointcloud concatenation has two process, "time synchronization" and "pointcloud concatenation", it is possible to separate these processes. +```bash +ros2 topic echo /diagnostics +``` -In the future, Nodes will be completely separated in order to achieve node loosely coupled nature, but currently both nodes can be selected for backward compatibility ([See this PR](https://github.com/autowarefoundation/autoware.universe/pull/3312)). +Below is an example output when the point clouds are concatenated successfully: -## Assumptions / Known limits +- Each point cloud has a value of `True`. +- The `cloud_concatenation_success` is `True`. +- The `level` value is `\0`. 
(diagnostic_msgs::msg::DiagnosticStatus::OK) -It is necessary to assume that the vehicle odometry value exists, the sensor data and odometry timestamp are correct, and the TF from `base_link` to `sensor_frame` is also correct. +```bash +header: + stamp: + sec: 1722492015 + nanosec: 848508777 + frame_id: '' +status: +- level: "\0" + name: 'concatenate_and_time_sync_node: concat_status' + message: Concatenated pointcloud is published and include all topics + hardware_id: concatenate_data_checker + values: + - key: concatenated_cloud_timestamp + value: '1718260240.159229994' + - key: reference_timestamp_min + value: '1718260240.149230003' + - key: reference_timestamp_max + value: '1718260240.169229984' + - key: /sensing/lidar/left/pointcloud_before_sync/timestamp + value: '1718260240.159229994' + - key: /sensing/lidar/left/pointcloud_before_sync/is_concatenated + value: 'True' + - key: /sensing/lidar/right/pointcloud_before_sync/timestamp + value: '1718260240.194104910' + - key: /sensing/lidar/right/pointcloud_before_sync/is_concatenated + value: 'True' + - key: /sensing/lidar/top/pointcloud_before_sync/timestamp + value: '1718260240.234578133' + - key: /sensing/lidar/top/pointcloud_before_sync/is_concatenated + value: 'True' + - key: cloud_concatenation_success + value: 'True' +``` + +Below is an example when point clouds fail to concatenate successfully. + +- Some point clouds might have values of `False`. +- The `cloud_concatenation_success` is `False`. +- The `level` value is `\x02`. (diagnostic_msgs::msg::DiagnosticStatus::ERROR) + +```bash +header: + stamp: + sec: 1722492663 + nanosec: 344942959 + frame_id: '' +status: +- level: "\x02" + name: 'concatenate_and_time_sync_node: concat_status' + message: Concatenated pointcloud is published but miss some topics + hardware_id: concatenate_data_checker + values: + - key: concatenated_cloud_timestamp + value: '1718260240.859827995' + - key: reference_timestamp_min + value: '1718260240.849828005' + - key: reference_timestamp_max + value: '1718260240.869827986' + - key: /sensing/lidar/left/pointcloud_before_sync/timestamp + value: '1718260240.859827995' + - key: /sensing/lidar/left/pointcloud_before_sync/is_concatenated + value: 'True' + - key: /sensing/lidar/right/pointcloud_before_sync/timestamp + value: '1718260240.895193815' + - key: /sensing/lidar/right/pointcloud_before_sync/is_concatenated + value: 'True' + - key: /sensing/lidar/top/pointcloud_before_sync/is_concatenated + value: 'False' + - key: cloud_concatenation_success + value: 'False' +``` + +## Node separation options + +There is also an option to separate the concatenate_and_time_sync_node into two nodes: one for `time synchronization` and another for `concatenate pointclouds` ([See this PR](https://github.com/autowarefoundation/autoware.universe/pull/3312)). + +Note that the `concatenate_pointclouds` and `time_synchronizer_nodelet` are using the [old design](https://github.com/autowarefoundation/autoware.universe/blob/9bb228fe5b7fa4c6edb47e4713c73489a02366e1/sensing/autoware_pointcloud_preprocessor/docs/concatenate-data.md) of the concatenate node. + +## Assumptions / Known Limits + +- If `is_motion_compensated` is set to `false`, the `concatenate_and_time_sync_node` will directly concatenate the point clouds without applying for motion compensation. This can save several milliseconds depending on the number of LiDARs being concatenated. 
Therefore, if the timestamp differences between point clouds are negligible, the user can set `is_motion_compensated` to `false` and omit the need for twist or odometry input for the node. +- As mentioned above, the user should clearly understand how their LiDAR's point cloud timestamps are managed to set the parameters correctly. If the user does not synchronize the point clouds, please set `matching_strategy.type` to `naive`. diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_algorithm.drawio.svg b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_algorithm.drawio.svg new file mode 100644 index 0000000000000..0ca825f5acaa6 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_algorithm.drawio.svg @@ -0,0 +1,810 @@ + + + + + + + + + + + + + + + + + + + + + + + +
+ [drawio SVG text content omitted — diagram labels: "Collector", "left pc" / "top pc" / "right pc", "stamp: t1_left / t1_top / t1_right", "reference timestamp", "arrival time", "compare with the reference timestamp: if match, add to the group; if it doesn't match, create new group", "create a collector", "set stamp as reference timestamp", "trigger the timer", "add to the collector", "when timer counts to zero / when group has left, right, top pointclouds: concatenate + publish"]
diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_all.png b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_all.png new file mode 100644 index 0000000000000..2410df95a74c7 Binary files /dev/null and b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_all.png differ diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_data.drawio.svg b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_data.drawio.svg deleted file mode 100644 index 786fee4c22ed7..0000000000000 --- a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_data.drawio.svg +++ /dev/null @@ -1,298 +0,0 @@ - - - - - - - - - - -
- [drawio SVG text content removed — diagram labels: "input topic 1/2/3", "concatenated topic", "timer start", "timeout", "this data is abandoned", timeline marks t0–t4]
diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_edge_case.drawio.svg b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_edge_case.drawio.svg new file mode 100644 index 0000000000000..33835b6396e51 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_edge_case.drawio.svg @@ -0,0 +1,2361 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ [drawio SVG text content omitted — six timelines (0–150 ms): "Normal", "One pointcloud drop", "One pointcloud delay", "Several pointclouds delay", "Several pointclouds delay, and one drop", "Several pointclouds delay, and drop"; annotations: "collector 1/2 created, timer start" (120 ms), "receive all pc: concatenate", "collector 1: timeout, concatenate", "Can decide to publish or not"]
diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_left.png b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_left.png new file mode 100644 index 0000000000000..87e78ab91fed6 Binary files /dev/null and b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_left.png differ diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_right.png b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_right.png new file mode 100644 index 0000000000000..cd6d64eabd49a Binary files /dev/null and b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_right.png differ diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_top.png b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_top.png new file mode 100644 index 0000000000000..c317642bd9799 Binary files /dev/null and b/sensing/autoware_pointcloud_preprocessor/docs/image/concatenate_top.png differ diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/ideal_timestamp_offset.drawio.svg b/sensing/autoware_pointcloud_preprocessor/docs/image/ideal_timestamp_offset.drawio.svg new file mode 100644 index 0000000000000..0b8eca9dd75aa --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/docs/image/ideal_timestamp_offset.drawio.svg @@ -0,0 +1,784 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ [drawio SVG text content omitted — diagram labels: ideal timestamps 0.01 / 0.05 / 0.09, "first arrive pointcloud", "reference stamp", "minus offset 40 ms" / "minus offset 80 ms", "compare with reference stamp", "lidar_timestamp_offset: [0.0, 0.04, 0.08]"]
diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/jitter.png b/sensing/autoware_pointcloud_preprocessor/docs/image/jitter.png new file mode 100644 index 0000000000000..c984e940171a6 Binary files /dev/null and b/sensing/autoware_pointcloud_preprocessor/docs/image/jitter.png differ diff --git a/sensing/autoware_pointcloud_preprocessor/docs/image/noise_timestamp_offset.drawio.svg b/sensing/autoware_pointcloud_preprocessor/docs/image/noise_timestamp_offset.drawio.svg new file mode 100644 index 0000000000000..bf5d604d2fb94 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/docs/image/noise_timestamp_offset.drawio.svg @@ -0,0 +1,2023 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ [drawio SVG text content omitted — diagram labels: Case 1 / Case 2, ideal timestamps 0.01 / 0.05 / 0.09, "real timestamp", "first arrive pointcloud", "reference timestamp", "possible reference timestamp", "possible timestamp for non-first pointcloud", noise marks ±5 / ±10 / ±15 ms, "minus offset 20 / 40 / 80 ms", "lidar_timestamp_offset: [0.0, 0.04, 0.08]", "lidar_timestamp_noise_window: [0.005, 0.01, 0.015]"]
diff --git a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp new file mode 100644 index 0000000000000..13604569df9a8 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp @@ -0,0 +1,89 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "combine_cloud_handler.hpp" + +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +class PointCloudConcatenateDataSynchronizerComponent; +class CombineCloudHandler; + +struct CollectorInfoBase +{ + virtual ~CollectorInfoBase() = default; +}; + +struct NaiveCollectorInfo : public CollectorInfoBase +{ + double timestamp; + + explicit NaiveCollectorInfo(double timestamp = 0.0) : timestamp(timestamp) {} +}; + +struct AdvancedCollectorInfo : public CollectorInfoBase +{ + double timestamp; + double noise_window; + + explicit AdvancedCollectorInfo(double timestamp = 0.0, double noise_window = 0.0) + : timestamp(timestamp), noise_window(noise_window) + { + } +}; + +class CloudCollector +{ +public: + CloudCollector( + std::shared_ptr && ros2_parent_node, + std::shared_ptr & combine_cloud_handler, int num_of_clouds, + double timeout_sec, bool debug_mode); + bool topic_exists(const std::string & topic_name); + bool process_pointcloud( + const std::string & topic_name, sensor_msgs::msg::PointCloud2::SharedPtr cloud); + void concatenate_callback(); + + ConcatenatedCloudResult concatenate_pointclouds( + std::unordered_map topic_to_cloud_map); + + std::unordered_map + get_topic_to_cloud_map(); + + [[nodiscard]] bool concatenate_finished() const; + + void set_info(std::shared_ptr collector_info); + [[nodiscard]] std::shared_ptr get_info() const; + void show_debug_message(); + +private: + std::shared_ptr ros2_parent_node_; + std::shared_ptr combine_cloud_handler_; + rclcpp::TimerBase::SharedPtr timer_; + std::unordered_map topic_to_cloud_map_; + uint64_t num_of_clouds_; + double timeout_sec_; + bool debug_mode_; + bool concatenate_finished_{false}; + std::mutex concatenate_mutex_; + std::shared_ptr collector_info_; +}; + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/collector_matching_strategy.hpp b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/collector_matching_strategy.hpp new file mode 100644 index 0000000000000..c314b24a07921 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/collector_matching_strategy.hpp @@ -0,0 +1,81 @@ +// Copyright 2024 TIER IV, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "cloud_collector.hpp" + +#include + +#include + +#include +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +struct MatchingParams +{ + std::string topic_name; + double cloud_timestamp; + double cloud_arrival_time; +}; + +class CollectorMatchingStrategy +{ +public: + virtual ~CollectorMatchingStrategy() = default; + + [[nodiscard]] virtual std::optional> match_cloud_to_collector( + const std::list> & cloud_collectors, + const MatchingParams & params) const = 0; + virtual void set_collector_info( + std::shared_ptr & collector, const MatchingParams & matching_params) = 0; +}; + +class NaiveMatchingStrategy : public CollectorMatchingStrategy +{ +public: + explicit NaiveMatchingStrategy(rclcpp::Node & node); + [[nodiscard]] std::optional> match_cloud_to_collector( + const std::list> & cloud_collectors, + const MatchingParams & params) const override; + void set_collector_info( + std::shared_ptr & collector, const MatchingParams & matching_params) override; +}; + +class AdvancedMatchingStrategy : public CollectorMatchingStrategy +{ +public: + explicit AdvancedMatchingStrategy(rclcpp::Node & node, std::vector input_topics); + + [[nodiscard]] std::optional> match_cloud_to_collector( + const std::list> & cloud_collectors, + const MatchingParams & params) const override; + void set_collector_info( + std::shared_ptr & collector, const MatchingParams & matching_params) override; + +private: + std::unordered_map topic_to_offset_map_; + std::unordered_map topic_to_noise_window_map_; +}; + +std::shared_ptr parse_matching_strategy(rclcpp::Node & node); + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp new file mode 100644 index 0000000000000..fa8c58aa74c11 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp @@ -0,0 +1,110 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once + +#include +#include +#include +#include +#include +#include + +// ROS includes +#include "autoware/point_types/types.hpp" + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ +using autoware::point_types::PointXYZIRC; +using point_cloud_msg_wrapper::PointCloud2Modifier; + +struct ConcatenatedCloudResult +{ + sensor_msgs::msg::PointCloud2::SharedPtr concatenate_cloud_ptr{nullptr}; + std::optional> + topic_to_transformed_cloud_map; + std::unordered_map topic_to_original_stamp_map; +}; + +class CombineCloudHandler +{ +private: + rclcpp::Node & node_; + + std::string output_frame_; + bool is_motion_compensated_; + bool publish_synchronized_pointcloud_; + bool keep_input_frame_in_synchronized_pointcloud_; + std::unique_ptr managed_tf_buffer_{nullptr}; + + std::deque twist_queue_; + + /// @brief RclcppTimeHash structure defines a custom hash function for the rclcpp::Time type by + /// using its nanoseconds representation as the hash value. + struct RclcppTimeHash + { + std::size_t operator()(const rclcpp::Time & t) const + { + return std::hash()(t.nanoseconds()); + } + }; + + static void convert_to_xyzirc_cloud( + const sensor_msgs::msg::PointCloud2::SharedPtr & input_cloud, + sensor_msgs::msg::PointCloud2::SharedPtr & xyzirc_cloud); + + void correct_pointcloud_motion( + const std::shared_ptr & transformed_cloud_ptr, + const std::vector & pc_stamps, + std::unordered_map & transform_memo, + std::shared_ptr transformed_delay_compensated_cloud_ptr); + +public: + CombineCloudHandler( + rclcpp::Node & node, std::string output_frame, bool is_motion_compensated, + bool publish_synchronized_pointcloud, bool keep_input_frame_in_synchronized_pointcloud, + bool has_static_tf_only); + void process_twist( + const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr & twist_msg); + void process_odometry(const nav_msgs::msg::Odometry::ConstSharedPtr & odometry_msg); + + ConcatenatedCloudResult combine_pointclouds( + std::unordered_map & topic_to_cloud_map); + + Eigen::Matrix4f compute_transform_to_adjust_for_old_timestamp( + const rclcpp::Time & old_stamp, const rclcpp::Time & new_stamp); + + std::deque get_twist_queue(); +}; + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp new file mode 100644 index 0000000000000..654593e317ad9 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp @@ -0,0 +1,124 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once + +#include +#include +#include +#include +#include +#include + +// ROS includes +#include "cloud_collector.hpp" +#include "collector_matching_strategy.hpp" +#include "combine_cloud_handler.hpp" + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ +class PointCloudConcatenateDataSynchronizerComponent : public rclcpp::Node +{ +public: + explicit PointCloudConcatenateDataSynchronizerComponent(const rclcpp::NodeOptions & node_options); + ~PointCloudConcatenateDataSynchronizerComponent() override = default; + void publish_clouds( + ConcatenatedCloudResult && concatenated_cloud_result, + std::shared_ptr collector_info); + void manage_collector_list(); + std::list> get_cloud_collectors(); + void add_cloud_collector(const std::shared_ptr & collector); + +private: + struct Parameters + { + bool use_naive_approach; + bool debug_mode; + bool has_static_tf_only; + double rosbag_length; + int maximum_queue_size; + double timeout_sec; + bool is_motion_compensated; + bool publish_synchronized_pointcloud; + bool keep_input_frame_in_synchronized_pointcloud; + bool publish_previous_but_late_pointcloud; + std::string synchronized_pointcloud_postfix; + std::string input_twist_topic_type; + std::vector input_topics; + std::string output_frame; + std::string matching_strategy; + } params_; + + double current_concatenate_cloud_timestamp_{0.0}; + double latest_concatenate_cloud_timestamp_{0.0}; + bool drop_previous_but_late_pointcloud_{false}; + bool publish_pointcloud_{false}; + bool is_concatenated_cloud_empty_{false}; + std::shared_ptr diagnostic_collector_info_; + std::unordered_map diagnostic_topic_to_original_stamp_map_; + + std::shared_ptr combine_cloud_handler_; + std::list> cloud_collectors_; + std::unique_ptr collector_matching_strategy_; + std::mutex cloud_collectors_mutex_; + + // default postfix name for synchronized pointcloud + static constexpr const char * default_sync_topic_postfix = "_synchronized"; + + // subscribers + std::vector::SharedPtr> pointcloud_subs_; + rclcpp::Subscription::SharedPtr twist_sub_; + rclcpp::Subscription::SharedPtr odom_sub_; + + // publishers + rclcpp::Publisher::SharedPtr concatenated_cloud_publisher_; + std::unordered_map::SharedPtr> + topic_to_transformed_cloud_publisher_map_; + std::unique_ptr debug_publisher_; + + std::unique_ptr> stop_watch_ptr_; + diagnostic_updater::Updater diagnostic_updater_{this}; + + void cloud_callback( + const sensor_msgs::msg::PointCloud2::SharedPtr & input_ptr, const std::string & topic_name); + void twist_callback(const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr input); + void odom_callback(const nav_msgs::msg::Odometry::ConstSharedPtr input); + + static std::string format_timestamp(double timestamp); + void check_concat_status(diagnostic_updater::DiagnosticStatusWrapper & stat); + std::string replace_sync_topic_name_postfix( + const std::string & original_topic_name, const std::string & postfix); +}; + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_nodelet.hpp b/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_nodelet.hpp deleted file mode 100644 index 6164e85c35717..0000000000000 --- 
a/sensing/autoware_pointcloud_preprocessor/include/autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_nodelet.hpp +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2023 TIER IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Software License Agreement (BSD License) - * - * Copyright (c) 2009, Willow Garage, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Willow Garage, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - * $Id: concatenate_data.cpp 35231 2011-01-14 05:33:20Z rusu $ - * - */ - -#ifndef AUTOWARE__POINTCLOUD_PREPROCESSOR__CONCATENATE_DATA__CONCATENATE_AND_TIME_SYNC_NODELET_HPP_ -#define AUTOWARE__POINTCLOUD_PREPROCESSOR__CONCATENATE_DATA__CONCATENATE_AND_TIME_SYNC_NODELET_HPP_ - -#include -#include -#include -#include -#include -#include -#include - -// ROS includes -#include "autoware/point_types/types.hpp" - -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -namespace autoware::pointcloud_preprocessor -{ -using autoware::point_types::PointXYZIRC; -using point_cloud_msg_wrapper::PointCloud2Modifier; - -/** \brief @b PointCloudConcatenateDataSynchronizerComponent is a special form of data - * synchronizer: it listens for a set of input PointCloud messages on the same topic, - * checks their timestamps, and concatenates their fields together into a single - * PointCloud output message. 
- * \author Radu Bogdan Rusu - */ -class PointCloudConcatenateDataSynchronizerComponent : public rclcpp::Node -{ -public: - typedef sensor_msgs::msg::PointCloud2 PointCloud2; - - /** \brief constructor. */ - explicit PointCloudConcatenateDataSynchronizerComponent(const rclcpp::NodeOptions & node_options); - - /** \brief constructor. - * \param queue_size the maximum queue size - */ - PointCloudConcatenateDataSynchronizerComponent( - const rclcpp::NodeOptions & node_options, int queue_size); - - /** \brief Empty destructor. */ - virtual ~PointCloudConcatenateDataSynchronizerComponent() {} - -private: - /** \brief The output PointCloud publisher. */ - rclcpp::Publisher::SharedPtr pub_output_; - /** \brief Delay Compensated PointCloud publisher*/ - std::map::SharedPtr> - transformed_raw_pc_publisher_map_; - - /** \brief The maximum number of messages that we can store in the queue. */ - int maximum_queue_size_ = 3; - - double timeout_sec_ = 0.1; - - bool publish_synchronized_pointcloud_; - bool keep_input_frame_in_synchronized_pointcloud_; - std::string synchronized_pointcloud_postfix_; - - std::set not_subscribed_topic_names_; - - /** \brief A vector of subscriber. */ - std::vector::SharedPtr> filters_; - - rclcpp::Subscription::SharedPtr sub_twist_; - rclcpp::Subscription::SharedPtr sub_odom_; - - rclcpp::TimerBase::SharedPtr timer_; - diagnostic_updater::Updater updater_{this}; - - const std::string input_twist_topic_type_; - - /** \brief Output TF frame the concatenated points should be transformed to. */ - std::string output_frame_; - - /** \brief The flag to indicate if only static TF are used. */ - bool has_static_tf_only_; - - /** \brief Input point cloud topics. */ - // XmlRpc::XmlRpcValue input_topics_; - std::vector input_topics_; - - std::unique_ptr managed_tf_buffer_{nullptr}; - - std::deque twist_ptr_queue_; - - std::map cloud_stdmap_; - std::map cloud_stdmap_tmp_; - std::mutex mutex_; - - std::vector input_offset_; - std::map offset_map_; - - Eigen::Matrix4f computeTransformToAdjustForOldTimestamp( - const rclcpp::Time & old_stamp, const rclcpp::Time & new_stamp); - std::map combineClouds( - sensor_msgs::msg::PointCloud2::SharedPtr & concat_cloud_ptr); - void publish(); - - void convertToXYZIRCCloud( - const sensor_msgs::msg::PointCloud2::SharedPtr & input_ptr, - sensor_msgs::msg::PointCloud2::SharedPtr & output_ptr); - void setPeriod(const int64_t new_period); - void cloud_callback( - const sensor_msgs::msg::PointCloud2::ConstSharedPtr & input_ptr, - const std::string & topic_name); - void twist_callback(const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr input); - void odom_callback(const nav_msgs::msg::Odometry::ConstSharedPtr input); - void timer_callback(); - - void checkConcatStatus(diagnostic_updater::DiagnosticStatusWrapper & stat); - std::string replaceSyncTopicNamePostfix( - const std::string & original_topic_name, const std::string & postfix); - - /** \brief processing time publisher. 
**/ - std::unique_ptr> stop_watch_ptr_; - std::unique_ptr debug_publisher_; -}; - -} // namespace autoware::pointcloud_preprocessor - -// clang-format off -#endif // AUTOWARE__POINTCLOUD_PREPROCESSOR__CONCATENATE_DATA__CONCATENATE_AND_TIME_SYNC_NODELET_HPP_ // NOLINT -// clang-format on diff --git a/sensing/autoware_pointcloud_preprocessor/launch/concatenate_and_time_sync_node.launch.xml b/sensing/autoware_pointcloud_preprocessor/launch/concatenate_and_time_sync_node.launch.xml new file mode 100644 index 0000000000000..f553c15f01210 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/launch/concatenate_and_time_sync_node.launch.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/sensing/autoware_pointcloud_preprocessor/schema/concatenate_and_time_sync_node.schema.json b/sensing/autoware_pointcloud_preprocessor/schema/concatenate_and_time_sync_node.schema.json new file mode 100644 index 0000000000000..c3758d05a8349 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/schema/concatenate_and_time_sync_node.schema.json @@ -0,0 +1,163 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Parameters for Concatenate and Time Synchronize Node", + "type": "object", + "definitions": { + "concatenate_and_time_sync_node": { + "type": "object", + "properties": { + "debug_mode": { + "type": "boolean", + "default": false, + "description": "Flag to enables debug mode to display additional logging information." + }, + "has_static_tf_only": { + "type": "boolean", + "default": false, + "description": "Flag to indicate if only static TF is used." + }, + "rosbag_length": { + "type": "number", + "default": 10.0, + "minimum": 0.0, + "description": " This value determine if the rosbag has started from the beginning again. The value should be set smaller than the actual length of the bag." + }, + "maximum_queue_size": { + "type": "integer", + "default": "5", + "minimum": 1, + "description": "Maximum size of the queue for the Keep Last policy in QoS history." + }, + "timeout_sec": { + "type": "number", + "default": "0.1", + "minimum": 0.001, + "description": "Timer's timeout duration in seconds when collectors are created. Collectors will concatenate the point clouds when timeout_sec reaches zero." + }, + "is_motion_compensated": { + "type": "boolean", + "default": true, + "description": "Flag to indicate if motion compensation is enabled." + }, + "publish_synchronized_pointcloud": { + "type": "boolean", + "default": true, + "description": "Flag to indicate if synchronized point cloud should be published." + }, + "keep_input_frame_in_synchronized_pointcloud": { + "type": "boolean", + "default": true, + "description": "Flag to indicate if input frame should be kept in synchronized point cloud." + }, + "publish_previous_but_late_pointcloud": { + "type": "boolean", + "default": false, + "description": "Flag to indicate if a concatenated point cloud should be published if its timestamp is earlier than the previous published concatenated point cloud." + }, + "synchronized_pointcloud_postfix": { + "type": "string", + "default": "pointcloud", + "description": "Postfix for the topic name of the synchronized point cloud." + }, + "input_twist_topic_type": { + "type": "string", + "enum": ["twist", "odom"], + "default": "twist", + "description": "Type of the input twist topic." 
+ }, + "input_topics": { + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "default": ["cloud_topic1", "cloud_topic2", "cloud_topic3"], + "minItems": 2, + "description": "List of input point cloud topics." + }, + "output_frame": { + "type": "string", + "default": "base_link", + "minLength": 1, + "description": "Output frame." + }, + "matching_strategy": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["naive", "advanced"], + "default": "advanced", + "description": "Set it to `advanced` if you can synchronize your LiDAR sensor; otherwise, set it to `naive`." + }, + "lidar_timestamp_offsets": { + "type": "array", + "items": { + "type": "number", + "minimum": 0.0 + }, + "default": [0.0, 0.0, 0.0], + "minItems": 2, + "description": "List of LiDAR timestamp offsets in seconds (relative to the earliest LiDAR timestamp). The offsets should be provided in the same order as the input topics." + }, + "lidar_timestamp_noise_window": { + "type": "array", + "items": { + "type": "number", + "minimum": 0.0 + }, + "default": [0.01, 0.01, 0.01], + "minItems": 2, + "description": "List of LiDAR timestamp noise windows in seconds. The noise values should be specified in the same order as the input_topics." + } + }, + "required": ["type"], + "dependencies": { + "type": { + "oneOf": [ + { + "properties": { "type": { "const": "naive" } }, + "not": { + "required": ["lidar_timestamp_offsets", "lidar_timestamp_noise_window"] + } + }, + { + "properties": { "type": { "const": "advanced" } }, + "required": ["lidar_timestamp_offsets", "lidar_timestamp_noise_window"] + } + ] + } + } + } + }, + "required": [ + "debug_mode", + "has_static_tf_only", + "rosbag_length", + "maximum_queue_size", + "timeout_sec", + "is_motion_compensated", + "publish_synchronized_pointcloud", + "keep_input_frame_in_synchronized_pointcloud", + "publish_previous_but_late_pointcloud", + "synchronized_pointcloud_postfix", + "input_twist_topic_type", + "input_topics", + "output_frame", + "matching_strategy" + ] + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/concatenate_and_time_sync_node" + } + }, + "required": ["ros__parameters"] + } + }, + "required": ["/**"] +} diff --git a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/cloud_collector.cpp b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/cloud_collector.cpp new file mode 100644 index 0000000000000..63ee23d204788 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/cloud_collector.cpp @@ -0,0 +1,156 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
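The cloud_collector.cpp implementation that follows is easiest to read with the collector lifecycle in mind: a collector serves one concatenation round, stores at most one cloud per input topic, and concatenates either when every expected topic has contributed or when its timeout timer fires. Below is a minimal, dependency-free sketch of that lifecycle; the `MiniCollector` type, its members, and the numbers are illustrative stand-ins, not the node's actual API.

```cpp
// Minimal sketch of the collector lifecycle (illustrative only; the real
// implementation below uses ROS 2 timers, mutexes, and sensor_msgs types).
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <limits>
#include <map>
#include <string>

struct Cloud
{
  double stamp;            // seconds
  std::size_t num_points;  // stand-in for the actual point data
};

class MiniCollector
{
public:
  explicit MiniCollector(std::size_t num_of_clouds) : num_of_clouds_(num_of_clouds) {}

  // Returns true once all expected topics have contributed and the "concatenation" ran.
  bool process_pointcloud(const std::string & topic, const Cloud & cloud)
  {
    topic_to_cloud_[topic] = cloud;
    if (topic_to_cloud_.size() == num_of_clouds_) {
      concatenate();  // in the real node this is also triggered by the timeout timer
    }
    return finished_;
  }

  // Timer path: concatenate whatever has arrived so far.
  void on_timeout()
  {
    if (!finished_) {
      concatenate();
    }
  }

private:
  void concatenate()
  {
    std::size_t total = 0;
    double earliest = std::numeric_limits<double>::max();
    for (const auto & [topic, cloud] : topic_to_cloud_) {
      total += cloud.num_points;
      earliest = std::min(earliest, cloud.stamp);
    }
    std::cout << "concatenated " << total << " points, stamp " << earliest << "\n";
    finished_ = true;
  }

  std::size_t num_of_clouds_;
  std::map<std::string, Cloud> topic_to_cloud_;
  bool finished_{false};
};

int main()
{
  MiniCollector collector(3);
  collector.process_pointcloud("right", {10.000, 1000});
  collector.process_pointcloud("top", {10.005, 2000});
  collector.process_pointcloud("left", {10.010, 1500});  // third cloud triggers concatenation
  return 0;
}
```

In the real node the timeout path is driven by an rclcpp timer created with `timeout_sec`, and a finished collector is later removed by the parent node to free resources.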
+ +#include "autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp" + +#include "autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp" +#include "autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp" + +#include + +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +CloudCollector::CloudCollector( + std::shared_ptr && ros2_parent_node, + std::shared_ptr & combine_cloud_handler, int num_of_clouds, + double timeout_sec, bool debug_mode) +: ros2_parent_node_(std::move(ros2_parent_node)), + combine_cloud_handler_(combine_cloud_handler), + num_of_clouds_(num_of_clouds), + timeout_sec_(timeout_sec), + debug_mode_(debug_mode) +{ + const auto period_ns = std::chrono::duration_cast( + std::chrono::duration(timeout_sec_)); + + timer_ = + rclcpp::create_timer(ros2_parent_node_, ros2_parent_node_->get_clock(), period_ns, [this]() { + std::lock_guard concatenate_lock(concatenate_mutex_); + if (concatenate_finished_) return; + concatenate_callback(); + }); +} + +void CloudCollector::set_info(std::shared_ptr collector_info) +{ + collector_info_ = std::move(collector_info); +} + +std::shared_ptr CloudCollector::get_info() const +{ + return collector_info_; +} + +bool CloudCollector::topic_exists(const std::string & topic_name) +{ + return topic_to_cloud_map_.find(topic_name) != topic_to_cloud_map_.end(); +} + +bool CloudCollector::process_pointcloud( + const std::string & topic_name, sensor_msgs::msg::PointCloud2::SharedPtr cloud) +{ + std::lock_guard concatenate_lock(concatenate_mutex_); + if (concatenate_finished_) return false; + + // Check if the map already contains an entry for the same topic. This shouldn't happen if the + // parameter 'lidar_timestamp_noise_window' is set correctly. + if (topic_to_cloud_map_.find(topic_name) != topic_to_cloud_map_.end()) { + RCLCPP_WARN_STREAM_THROTTLE( + ros2_parent_node_->get_logger(), *ros2_parent_node_->get_clock(), + std::chrono::milliseconds(10000).count(), + "Topic '" << topic_name + << "' already exists in the collector. Check the timestamp of the pointcloud."); + } + topic_to_cloud_map_[topic_name] = cloud; + if (topic_to_cloud_map_.size() == num_of_clouds_) { + concatenate_callback(); + } + + return true; +} + +bool CloudCollector::concatenate_finished() const +{ + return concatenate_finished_; +} + +void CloudCollector::concatenate_callback() +{ + if (debug_mode_) { + show_debug_message(); + } + + // All pointclouds are received or the timer has timed out, cancel the timer and concatenate the + // pointclouds in the collector. 
+ timer_->cancel(); + + auto concatenated_cloud_result = concatenate_pointclouds(topic_to_cloud_map_); + + ros2_parent_node_->publish_clouds(std::move(concatenated_cloud_result), collector_info_); + + concatenate_finished_ = true; +} + +ConcatenatedCloudResult CloudCollector::concatenate_pointclouds( + std::unordered_map topic_to_cloud_map) +{ + return combine_cloud_handler_->combine_pointclouds(topic_to_cloud_map); +} + +std::unordered_map +CloudCollector::get_topic_to_cloud_map() +{ + return topic_to_cloud_map_; +} + +void CloudCollector::show_debug_message() +{ + auto time_until_trigger = timer_->time_until_trigger(); + std::stringstream log_stream; + log_stream << std::fixed << std::setprecision(6); + log_stream << "Collector's concatenate callback time: " + << ros2_parent_node_->get_clock()->now().seconds() << " seconds\n"; + + if (auto advanced_info = std::dynamic_pointer_cast(collector_info_)) { + log_stream << "Advanced strategy:\n Collector's reference time min: " + << advanced_info->timestamp - advanced_info->noise_window + << " to max: " << advanced_info->timestamp + advanced_info->noise_window + << " seconds\n"; + } else if (auto naive_info = std::dynamic_pointer_cast(collector_info_)) { + log_stream << "Naive strategy:\n Collector's timestamp: " << naive_info->timestamp + << " seconds\n"; + } + + log_stream << "Time until trigger: " << (time_until_trigger.count() / 1e9) << " seconds\n"; + + log_stream << "Pointclouds: ["; + std::string separator = ""; + for (const auto & [topic, cloud] : topic_to_cloud_map_) { + log_stream << separator; + log_stream << "[" << topic << ", " << rclcpp::Time(cloud->header.stamp).seconds() << "]"; + separator = ", "; + } + + log_stream << "]\n"; + + RCLCPP_INFO(ros2_parent_node_->get_logger(), "%s", log_stream.str().c_str()); +} + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/collector_matching_strategy.cpp b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/collector_matching_strategy.cpp new file mode 100644 index 0000000000000..50058df3d91af --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/collector_matching_strategy.cpp @@ -0,0 +1,121 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
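collector_matching_strategy.cpp below implements two ways of assigning an incoming cloud to a collector: the naive strategy picks the collector whose reference time is closest to the cloud's arrival time, while the advanced strategy subtracts a per-topic timestamp offset from the cloud's stamp and checks whether the result falls inside the collector's reference window, widened by the topic's own noise window. The following is a self-contained sketch of that interval test; the topic names, offsets, and noise windows are made-up illustration values, not defaults of the node.

```cpp
// Sketch of the interval test used by the advanced matching strategy
// (illustrative values only; not taken from a real sensor setup).
#include <iostream>
#include <map>
#include <string>

struct CollectorRef
{
  double timestamp;     // reference time: cloud stamp minus its topic's offset
  double noise_window;  // noise window of the topic that created the collector
};

bool matches(
  const CollectorRef & ref, const std::string & topic, double cloud_stamp,
  const std::map<std::string, double> & offsets, const std::map<std::string, double> & windows)
{
  const double time = cloud_stamp - offsets.at(topic);
  const double lower = ref.timestamp - ref.noise_window - windows.at(topic);
  const double upper = ref.timestamp + ref.noise_window + windows.at(topic);
  return time > lower && time < upper;
}

int main()
{
  // Hypothetical setup: the top LiDAR fires 0.05 s after the right LiDAR.
  std::map<std::string, double> offsets{{"right", 0.0}, {"top", 0.05}};
  std::map<std::string, double> windows{{"right", 0.01}, {"top", 0.01}};

  // A collector was created by a right cloud stamped at t = 100.000 s.
  CollectorRef ref{100.000 - offsets.at("right"), windows.at("right")};

  // A top cloud stamped at 100.048 s maps to 99.998 s after subtracting its offset,
  // which lies inside (99.98, 100.02), so it joins the same collector.
  std::cout << std::boolalpha << matches(ref, "top", 100.048, offsets, windows) << "\n";  // true
  std::cout << matches(ref, "top", 100.348, offsets, windows) << "\n";                    // false
  return 0;
}
```

Widening the window by both noise terms means a match only requires the offset-corrected stamps to agree up to the combined jitter of the two topics.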
+ +#include "autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp" + +#include +#include + +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +NaiveMatchingStrategy::NaiveMatchingStrategy(rclcpp::Node & node) +{ + RCLCPP_INFO(node.get_logger(), "Utilize naive matching strategy"); +} + +std::optional> NaiveMatchingStrategy::match_cloud_to_collector( + const std::list> & cloud_collectors, + const MatchingParams & params) const +{ + std::optional smallest_time_difference = std::nullopt; + std::shared_ptr closest_collector = nullptr; + + for (const auto & cloud_collector : cloud_collectors) { + if (!cloud_collector->topic_exists(params.topic_name)) { + auto info = cloud_collector->get_info(); + if (auto naive_info = std::dynamic_pointer_cast(info)) { + double time_difference = std::abs(params.cloud_arrival_time - naive_info->timestamp); + if (!smallest_time_difference || time_difference < smallest_time_difference) { + smallest_time_difference = time_difference; + closest_collector = cloud_collector; + } + } + } + } + + if (closest_collector != nullptr) { + return closest_collector; + } + return std::nullopt; +} + +void NaiveMatchingStrategy::set_collector_info( + std::shared_ptr & collector, const MatchingParams & matching_params) +{ + auto info = std::make_shared(matching_params.cloud_arrival_time); + collector->set_info(info); +} + +AdvancedMatchingStrategy::AdvancedMatchingStrategy( + rclcpp::Node & node, std::vector input_topics) +{ + auto lidar_timestamp_offsets = + node.declare_parameter>("matching_strategy.lidar_timestamp_offsets"); + auto lidar_timestamp_noise_window = + node.declare_parameter>("matching_strategy.lidar_timestamp_noise_window"); + + if (lidar_timestamp_offsets.size() != input_topics.size()) { + throw std::runtime_error( + "The number of topics does not match the number of timestamp offsets."); + } + if (lidar_timestamp_noise_window.size() != input_topics.size()) { + throw std::runtime_error( + "The number of topics does not match the number of timestamp noise window."); + } + + for (size_t i = 0; i < input_topics.size(); i++) { + topic_to_offset_map_[input_topics[i]] = lidar_timestamp_offsets[i]; + topic_to_noise_window_map_[input_topics[i]] = lidar_timestamp_noise_window[i]; + } + + RCLCPP_INFO(node.get_logger(), "Utilize advanced matching strategy"); +} + +std::optional> AdvancedMatchingStrategy::match_cloud_to_collector( + const std::list> & cloud_collectors, + const MatchingParams & params) const +{ + for (const auto & cloud_collector : cloud_collectors) { + auto info = cloud_collector->get_info(); + if (auto advanced_info = std::dynamic_pointer_cast(info)) { + auto reference_timestamp_min = advanced_info->timestamp - advanced_info->noise_window; + auto reference_timestamp_max = advanced_info->timestamp + advanced_info->noise_window; + double time = params.cloud_timestamp - topic_to_offset_map_.at(params.topic_name); + if ( + time < reference_timestamp_max + topic_to_noise_window_map_.at(params.topic_name) && + time > reference_timestamp_min - topic_to_noise_window_map_.at(params.topic_name)) { + return cloud_collector; + } + } + } + return std::nullopt; +} + +void AdvancedMatchingStrategy::set_collector_info( + std::shared_ptr & collector, const MatchingParams & matching_params) +{ + auto info = std::make_shared( + matching_params.cloud_timestamp - topic_to_offset_map_.at(matching_params.topic_name), + topic_to_noise_window_map_[matching_params.topic_name]); + collector->set_info(info); +} + +} // 
namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/combine_cloud_handler.cpp b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/combine_cloud_handler.cpp new file mode 100644 index 0000000000000..d68218314830b --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/combine_cloud_handler.cpp @@ -0,0 +1,336 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp" + +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +CombineCloudHandler::CombineCloudHandler( + rclcpp::Node & node, std::string output_frame, bool is_motion_compensated, + bool publish_synchronized_pointcloud, bool keep_input_frame_in_synchronized_pointcloud, + bool has_static_tf_only) +: node_(node), + output_frame_(std::move(output_frame)), + is_motion_compensated_(is_motion_compensated), + publish_synchronized_pointcloud_(publish_synchronized_pointcloud), + keep_input_frame_in_synchronized_pointcloud_(keep_input_frame_in_synchronized_pointcloud), + managed_tf_buffer_( + std::make_unique(&node_, has_static_tf_only)) +{ +} + +void CombineCloudHandler::process_twist( + const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr & twist_msg) +{ + geometry_msgs::msg::TwistStamped msg; + msg.header = twist_msg->header; + msg.twist = twist_msg->twist.twist; + + // If time jumps backwards (e.g. when a rosbag restarts), clear buffer + if (!twist_queue_.empty()) { + if (rclcpp::Time(twist_queue_.front().header.stamp) > rclcpp::Time(msg.header.stamp)) { + twist_queue_.clear(); + } + } + + // Twist data in the queue that is older than the current twist by 1 second will be cleared. + auto cutoff_time = rclcpp::Time(msg.header.stamp) - rclcpp::Duration::from_seconds(1.0); + + while (!twist_queue_.empty()) { + if (rclcpp::Time(twist_queue_.front().header.stamp) > cutoff_time) { + break; + } + twist_queue_.pop_front(); + } + + twist_queue_.push_back(msg); +} + +void CombineCloudHandler::process_odometry( + const nav_msgs::msg::Odometry::ConstSharedPtr & odometry_msg) +{ + geometry_msgs::msg::TwistStamped msg; + msg.header = odometry_msg->header; + msg.twist = odometry_msg->twist.twist; + + // If time jumps backwards (e.g. when a rosbag restarts), clear buffer + if (!twist_queue_.empty()) { + if (rclcpp::Time(twist_queue_.front().header.stamp) > rclcpp::Time(msg.header.stamp)) { + twist_queue_.clear(); + } + } + + // Twist data in the queue that is older than the current twist by 1 second will be cleared. 
+ auto cutoff_time = rclcpp::Time(msg.header.stamp) - rclcpp::Duration::from_seconds(1.0); + + while (!twist_queue_.empty()) { + if (rclcpp::Time(twist_queue_.front().header.stamp) > cutoff_time) { + break; + } + twist_queue_.pop_front(); + } + + twist_queue_.push_back(msg); +} + +std::deque CombineCloudHandler::get_twist_queue() +{ + return twist_queue_; +} + +void CombineCloudHandler::convert_to_xyzirc_cloud( + const sensor_msgs::msg::PointCloud2::SharedPtr & input_cloud, + sensor_msgs::msg::PointCloud2::SharedPtr & xyzirc_cloud) +{ + xyzirc_cloud->header = input_cloud->header; + + PointCloud2Modifier output_modifier{ + *xyzirc_cloud, input_cloud->header.frame_id}; + output_modifier.reserve(input_cloud->width); + + bool has_valid_intensity = + std::any_of(input_cloud->fields.begin(), input_cloud->fields.end(), [](const auto & field) { + return field.name == "intensity" && field.datatype == sensor_msgs::msg::PointField::UINT8; + }); + + bool has_valid_return_type = + std::any_of(input_cloud->fields.begin(), input_cloud->fields.end(), [](const auto & field) { + return field.name == "return_type" && field.datatype == sensor_msgs::msg::PointField::UINT8; + }); + + bool has_valid_channel = + std::any_of(input_cloud->fields.begin(), input_cloud->fields.end(), [](const auto & field) { + return field.name == "channel" && field.datatype == sensor_msgs::msg::PointField::UINT16; + }); + + sensor_msgs::PointCloud2Iterator it_x(*input_cloud, "x"); + sensor_msgs::PointCloud2Iterator it_y(*input_cloud, "y"); + sensor_msgs::PointCloud2Iterator it_z(*input_cloud, "z"); + + if (has_valid_intensity && has_valid_return_type && has_valid_channel) { + sensor_msgs::PointCloud2Iterator it_i(*input_cloud, "intensity"); + sensor_msgs::PointCloud2Iterator it_r(*input_cloud, "return_type"); + sensor_msgs::PointCloud2Iterator it_c(*input_cloud, "channel"); + + for (; it_x != it_x.end(); ++it_x, ++it_y, ++it_z, ++it_i, ++it_r, ++it_c) { + PointXYZIRC point; + point.x = *it_x; + point.y = *it_y; + point.z = *it_z; + point.intensity = *it_i; + point.return_type = *it_r; + point.channel = *it_c; + output_modifier.push_back(std::move(point)); + } + } else { + for (; it_x != it_x.end(); ++it_x, ++it_y, ++it_z) { + PointXYZIRC point; + point.x = *it_x; + point.y = *it_y; + point.z = *it_z; + output_modifier.push_back(std::move(point)); + } + } +} + +void CombineCloudHandler::correct_pointcloud_motion( + const std::shared_ptr & transformed_cloud_ptr, + const std::vector & pc_stamps, + std::unordered_map & transform_memo, + std::shared_ptr transformed_delay_compensated_cloud_ptr) +{ + Eigen::Matrix4f adjust_to_old_data_transform = Eigen::Matrix4f::Identity(); + rclcpp::Time current_cloud_stamp = rclcpp::Time(transformed_cloud_ptr->header.stamp); + for (const auto & stamp : pc_stamps) { + if (stamp >= current_cloud_stamp) continue; + + Eigen::Matrix4f new_to_old_transform; + if (transform_memo.find(stamp) != transform_memo.end()) { + new_to_old_transform = transform_memo[stamp]; + } else { + new_to_old_transform = + compute_transform_to_adjust_for_old_timestamp(stamp, current_cloud_stamp); + transform_memo[stamp] = new_to_old_transform; + } + adjust_to_old_data_transform = new_to_old_transform * adjust_to_old_data_transform; + current_cloud_stamp = stamp; + } + pcl_ros::transformPointCloud( + adjust_to_old_data_transform, *transformed_cloud_ptr, *transformed_delay_compensated_cloud_ptr); +} + +ConcatenatedCloudResult CombineCloudHandler::combine_pointclouds( + std::unordered_map & topic_to_cloud_map) +{ + 
ConcatenatedCloudResult concatenate_cloud_result; + + std::vector pc_stamps; + for (const auto & [topic, cloud] : topic_to_cloud_map) { + pc_stamps.emplace_back(cloud->header.stamp); + } + std::sort(pc_stamps.begin(), pc_stamps.end(), std::greater()); + const auto oldest_stamp = pc_stamps.back(); + + std::unordered_map transform_memo; + + // Before combining the pointclouds, initialize and reserve space for the concatenated pointcloud + concatenate_cloud_result.concatenate_cloud_ptr = + std::make_shared(); + + // Reserve space based on the total size of the pointcloud data to speed up the concatenation + // process + size_t total_data_size = 0; + for (const auto & [topic, cloud] : topic_to_cloud_map) { + total_data_size += cloud->data.size(); + } + concatenate_cloud_result.concatenate_cloud_ptr->data.reserve(total_data_size); + + for (const auto & [topic, cloud] : topic_to_cloud_map) { + // convert to XYZIRC pointcloud if pointcloud is not empty + auto xyzirc_cloud = std::make_shared(); + convert_to_xyzirc_cloud(cloud, xyzirc_cloud); + + auto transformed_cloud_ptr = std::make_shared(); + managed_tf_buffer_->transformPointcloud(output_frame_, *xyzirc_cloud, *transformed_cloud_ptr); + + concatenate_cloud_result.topic_to_original_stamp_map[topic] = + rclcpp::Time(cloud->header.stamp).seconds(); + + // compensate pointcloud + std::shared_ptr transformed_delay_compensated_cloud_ptr; + if (is_motion_compensated_) { + transformed_delay_compensated_cloud_ptr = std::make_shared(); + correct_pointcloud_motion( + transformed_cloud_ptr, pc_stamps, transform_memo, transformed_delay_compensated_cloud_ptr); + } else { + transformed_delay_compensated_cloud_ptr = transformed_cloud_ptr; + } + + pcl::concatenatePointCloud( + *concatenate_cloud_result.concatenate_cloud_ptr, *transformed_delay_compensated_cloud_ptr, + *concatenate_cloud_result.concatenate_cloud_ptr); + + if (publish_synchronized_pointcloud_) { + if (!concatenate_cloud_result.topic_to_transformed_cloud_map) { + // Initialize the map if it is not present + concatenate_cloud_result.topic_to_transformed_cloud_map = + std::unordered_map(); + } + // convert to original sensor frame if necessary + bool need_transform_to_sensor_frame = (cloud->header.frame_id != output_frame_); + if (keep_input_frame_in_synchronized_pointcloud_ && need_transform_to_sensor_frame) { + auto transformed_cloud_ptr_in_sensor_frame = + std::make_shared(); + managed_tf_buffer_->transformPointcloud( + cloud->header.frame_id, *transformed_delay_compensated_cloud_ptr, + *transformed_cloud_ptr_in_sensor_frame); + transformed_cloud_ptr_in_sensor_frame->header.stamp = oldest_stamp; + transformed_cloud_ptr_in_sensor_frame->header.frame_id = cloud->header.frame_id; + + (*concatenate_cloud_result.topic_to_transformed_cloud_map)[topic] = + transformed_cloud_ptr_in_sensor_frame; + } else { + transformed_delay_compensated_cloud_ptr->header.stamp = oldest_stamp; + transformed_delay_compensated_cloud_ptr->header.frame_id = output_frame_; + (*concatenate_cloud_result.topic_to_transformed_cloud_map)[topic] = + transformed_delay_compensated_cloud_ptr; + } + } + } + concatenate_cloud_result.concatenate_cloud_ptr->header.stamp = oldest_stamp; + + return concatenate_cloud_result; +} + +Eigen::Matrix4f CombineCloudHandler::compute_transform_to_adjust_for_old_timestamp( + const rclcpp::Time & old_stamp, const rclcpp::Time & new_stamp) +{ + // return identity if no twist is available + if (twist_queue_.empty()) { + RCLCPP_WARN_STREAM_THROTTLE( + node_.get_logger(), *node_.get_clock(), 
std::chrono::milliseconds(10000).count(), + "No twist is available. Please confirm twist topic and timestamp. Leaving point cloud " + "untransformed."); + return Eigen::Matrix4f::Identity(); + } + + auto old_twist_it = std::lower_bound( + std::begin(twist_queue_), std::end(twist_queue_), old_stamp, + [](const geometry_msgs::msg::TwistStamped & x, const rclcpp::Time & t) { + return rclcpp::Time(x.header.stamp) < t; + }); + old_twist_it = old_twist_it == twist_queue_.end() ? (twist_queue_.end() - 1) : old_twist_it; + + auto new_twist_it = std::lower_bound( + std::begin(twist_queue_), std::end(twist_queue_), new_stamp, + [](const geometry_msgs::msg::TwistStamped & x, const rclcpp::Time & t) { + return rclcpp::Time(x.header.stamp) < t; + }); + new_twist_it = new_twist_it == twist_queue_.end() ? (twist_queue_.end() - 1) : new_twist_it; + + auto prev_time = old_stamp; + double x = 0.0; + double y = 0.0; + double yaw = 0.0; + for (auto twist_it = old_twist_it; twist_it != new_twist_it + 1; ++twist_it) { + const double dt = + (twist_it != new_twist_it) + ? (rclcpp::Time((*twist_it).header.stamp) - rclcpp::Time(prev_time)).seconds() + : (rclcpp::Time(new_stamp) - rclcpp::Time(prev_time)).seconds(); + + if (std::fabs(dt) > 0.1) { + RCLCPP_WARN_STREAM_THROTTLE( + node_.get_logger(), *node_.get_clock(), std::chrono::milliseconds(10000).count(), + "Time difference is too large. Could not interpolate. Please confirm twist topic and " + "timestamp"); + break; + } + + const double distance = (*twist_it).twist.linear.x * dt; + yaw += (*twist_it).twist.angular.z * dt; + x += distance * std::cos(yaw); + y += distance * std::sin(yaw); + prev_time = (*twist_it).header.stamp; + } + + Eigen::Matrix4f transformation_matrix = Eigen::Matrix4f::Identity(); + + float cos_yaw = std::cos(yaw); + float sin_yaw = std::sin(yaw); + + transformation_matrix(0, 3) = x; + transformation_matrix(1, 3) = y; + transformation_matrix(0, 0) = cos_yaw; + transformation_matrix(0, 1) = -sin_yaw; + transformation_matrix(1, 0) = sin_yaw; + transformation_matrix(1, 1) = cos_yaw; + + return transformation_matrix; +} + +} // namespace autoware::pointcloud_preprocessor diff --git a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_node.cpp b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_node.cpp new file mode 100644 index 0000000000000..e168781c647ed --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_node.cpp @@ -0,0 +1,459 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
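Before the node source below, note that the motion compensation in combine_cloud_handler.cpp above reduces to a short dead-reckoning integration: the twist samples between the two stamps are walked through, yaw is accumulated from the angular velocity and the planar displacement from the linear velocity, and the result is packed into a 4x4 transform. A dependency-free sketch with a single made-up twist sample, assuming the twist is constant over the interval:

```cpp
// Sketch of the dead-reckoning step behind compute_transform_to_adjust_for_old_timestamp
// (Eigen and ROS types omitted; the twist sample, stamps, and point are made-up values).
#include <cmath>
#include <iostream>

int main()
{
  // One twist sample assumed constant over the whole interval.
  const double linear_x = 5.0;      // m/s
  const double angular_z = 0.1;     // rad/s
  const double old_stamp = 100.00;  // target (earliest) cloud stamp [s]
  const double new_stamp = 100.05;  // stamp of the cloud being compensated [s]

  // Same integration order as the node: advance yaw, then accumulate x/y.
  double x = 0.0, y = 0.0, yaw = 0.0;
  const double dt = new_stamp - old_stamp;
  const double distance = linear_x * dt;
  yaw += angular_z * dt;
  x += distance * std::cos(yaw);
  y += distance * std::sin(yaw);

  // The planar transform (rotation by yaw plus translation) maps a point expressed
  // in the vehicle frame at new_stamp into the vehicle frame at old_stamp.
  const double px = 10.0, py = 0.0;  // an arbitrary point of the cloud
  const double qx = std::cos(yaw) * px - std::sin(yaw) * py + x;
  const double qy = std::sin(yaw) * px + std::cos(yaw) * py + y;

  std::cout << "x=" << x << " y=" << y << " yaw=" << yaw << "\n";
  std::cout << "point (" << px << ", " << py << ") -> (" << qx << ", " << qy << ")\n";
  return 0;
}
```

Applying this transform to each cloud aligns it to the earliest stamp of the set before concatenation, which is why the concatenated cloud carries that earliest timestamp.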
+ +#include "autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp" + +#include "autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp" +#include "autoware/pointcloud_preprocessor/utility/memory.hpp" + +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace autoware::pointcloud_preprocessor +{ + +PointCloudConcatenateDataSynchronizerComponent::PointCloudConcatenateDataSynchronizerComponent( + const rclcpp::NodeOptions & node_options) +: Node("point_cloud_concatenator_component", node_options) +{ + // initialize debug tool + using autoware::universe_utils::DebugPublisher; + using autoware::universe_utils::StopWatch; + stop_watch_ptr_ = std::make_unique>(); + debug_publisher_ = std::make_unique(this, "concatenate_data_synchronizer"); + stop_watch_ptr_->tic("cyclic_time"); + stop_watch_ptr_->tic("processing_time"); + + // initialize parameters + params_.debug_mode = declare_parameter("debug_mode"); + params_.has_static_tf_only = declare_parameter("has_static_tf_only"); + params_.rosbag_length = declare_parameter("rosbag_length"); + params_.maximum_queue_size = static_cast(declare_parameter("maximum_queue_size")); + params_.timeout_sec = declare_parameter("timeout_sec"); + params_.is_motion_compensated = declare_parameter("is_motion_compensated"); + params_.publish_synchronized_pointcloud = + declare_parameter("publish_synchronized_pointcloud"); + params_.keep_input_frame_in_synchronized_pointcloud = + declare_parameter("keep_input_frame_in_synchronized_pointcloud"); + params_.publish_previous_but_late_pointcloud = + declare_parameter("publish_previous_but_late_pointcloud"); + params_.synchronized_pointcloud_postfix = + declare_parameter("synchronized_pointcloud_postfix"); + params_.input_twist_topic_type = declare_parameter("input_twist_topic_type"); + params_.input_topics = declare_parameter>("input_topics"); + params_.output_frame = declare_parameter("output_frame"); + + if (params_.input_topics.empty()) { + throw std::runtime_error("Need a 'input_topics' parameter to be set before continuing."); + } + if (params_.input_topics.size() == 1) { + throw std::runtime_error("Only one topic given. 
Need at least two topics to continue."); + } + + if (params_.output_frame.empty()) { + throw std::runtime_error("Need an 'output_frame' parameter to be set before continuing."); + } + + params_.matching_strategy = declare_parameter("matching_strategy.type"); + if (params_.matching_strategy == "naive") { + collector_matching_strategy_ = std::make_unique(*this); + } else if (params_.matching_strategy == "advanced") { + collector_matching_strategy_ = + std::make_unique(*this, params_.input_topics); + } else { + throw std::runtime_error("Matching strategy must be 'advanced' or 'naive'"); + } + + // Publishers + concatenated_cloud_publisher_ = this->create_publisher( + "output", rclcpp::SensorDataQoS().keep_last(params_.maximum_queue_size)); + + // Transformed Raw PointCloud2 Publisher to publish the transformed pointcloud + if (params_.publish_synchronized_pointcloud) { + for (auto & topic : params_.input_topics) { + std::string new_topic = + replace_sync_topic_name_postfix(topic, params_.synchronized_pointcloud_postfix); + auto publisher = this->create_publisher( + new_topic, rclcpp::SensorDataQoS().keep_last(params_.maximum_queue_size)); + topic_to_transformed_cloud_publisher_map_.insert({topic, publisher}); + } + } + + // Subscribers + if (params_.is_motion_compensated) { + if (params_.input_twist_topic_type == "twist") { + twist_sub_ = this->create_subscription( + "~/input/twist", rclcpp::QoS{100}, + std::bind( + &PointCloudConcatenateDataSynchronizerComponent::twist_callback, this, + std::placeholders::_1)); + } else if (params_.input_twist_topic_type == "odom") { + odom_sub_ = this->create_subscription( + "~/input/odom", rclcpp::QoS{100}, + std::bind( + &PointCloudConcatenateDataSynchronizerComponent::odom_callback, this, + std::placeholders::_1)); + } else { + throw std::runtime_error( + "input_twist_topic_type is invalid: " + params_.input_twist_topic_type); + } + } + + for (const std::string & topic : params_.input_topics) { + std::function callback = std::bind( + &PointCloudConcatenateDataSynchronizerComponent::cloud_callback, this, std::placeholders::_1, + topic); + + auto pointcloud_sub = this->create_subscription( + topic, rclcpp::SensorDataQoS().keep_last(params_.maximum_queue_size), callback); + pointcloud_subs_.push_back(pointcloud_sub); + } + RCLCPP_DEBUG_STREAM( + get_logger(), + "Subscribing to " << params_.input_topics.size() << " user given topics as inputs:"); + for (const auto & input_topic : params_.input_topics) { + RCLCPP_DEBUG_STREAM(get_logger(), " - " << input_topic); + } + + // Combine cloud handler + combine_cloud_handler_ = std::make_shared( + *this, params_.output_frame, params_.is_motion_compensated, + params_.publish_synchronized_pointcloud, params_.keep_input_frame_in_synchronized_pointcloud, + params_.has_static_tf_only); + + // Diagnostic Updater + diagnostic_updater_.setHardwareID("concatenate_data_checker"); + diagnostic_updater_.add( + "concat_status", this, &PointCloudConcatenateDataSynchronizerComponent::check_concat_status); +} + +std::string PointCloudConcatenateDataSynchronizerComponent::replace_sync_topic_name_postfix( + const std::string & original_topic_name, const std::string & postfix) +{ + std::string replaced_topic_name; + // separate the topic name by '/' and replace the last element with the new postfix + size_t pos = original_topic_name.find_last_of('/'); + if (pos == std::string::npos) { + // not found '/': this is not a namespaced topic + RCLCPP_WARN_STREAM( + get_logger(), + "The topic name is not namespaced. 
The postfix will be added to the end of the topic name."); + return original_topic_name + postfix; + } + + // replace the last element with the new postfix + replaced_topic_name = original_topic_name.substr(0, pos) + "/" + postfix; + + // if the topic name is the same as the original topic name, add the postfix to the end of the topic name + if (replaced_topic_name == original_topic_name) { + RCLCPP_WARN_STREAM( + get_logger(), "The topic name " + << original_topic_name + << " has the same postfix as the synchronized pointcloud. We append " + "the default postfix " + "to the end of the topic name."); + replaced_topic_name = original_topic_name + default_sync_topic_postfix; + } + return replaced_topic_name; +} + +void PointCloudConcatenateDataSynchronizerComponent::cloud_callback( + const sensor_msgs::msg::PointCloud2::SharedPtr & input_ptr, const std::string & topic_name) +{ + stop_watch_ptr_->toc("processing_time", true); + double cloud_arrival_time = this->get_clock()->now().seconds(); + manage_collector_list(); + + if (!utils::is_data_layout_compatible_with_point_xyzirc(*input_ptr)) { + RCLCPP_ERROR( + get_logger(), "The pointcloud layout is not compatible with PointXYZIRC. Aborting"); + + if (utils::is_data_layout_compatible_with_point_xyzi(*input_ptr)) { + RCLCPP_ERROR( + get_logger(), + "The pointcloud layout is compatible with PointXYZI. You may be using legacy code/data"); + } + + return; + } + + if (params_.debug_mode) { + RCLCPP_INFO( + this->get_logger(), " pointcloud %s timestamp: %lf arrival time: %lf seconds, latency: %lf", + topic_name.c_str(), rclcpp::Time(input_ptr->header.stamp).seconds(), cloud_arrival_time, + cloud_arrival_time - rclcpp::Time(input_ptr->header.stamp).seconds()); + } + + if (input_ptr->data.empty()) { + RCLCPP_WARN_STREAM_THROTTLE( + this->get_logger(), *this->get_clock(), 1000, "Empty sensor points!"); + } + + // protect cloud collectors list + std::unique_lock cloud_collectors_lock(cloud_collectors_mutex_); + + // For each callback, check whether there is an existing collector that matches this cloud + std::optional> cloud_collector = std::nullopt; + MatchingParams matching_params; + matching_params.topic_name = topic_name; + matching_params.cloud_arrival_time = cloud_arrival_time; + matching_params.cloud_timestamp = rclcpp::Time(input_ptr->header.stamp).seconds(); + + if (!cloud_collectors_.empty()) { + cloud_collector = + collector_matching_strategy_->match_cloud_to_collector(cloud_collectors_, matching_params); + } + + bool process_success = false; + if (cloud_collector.has_value()) { + auto collector = cloud_collector.value(); + if (collector) { + cloud_collectors_lock.unlock(); + process_success = cloud_collector.value()->process_pointcloud(topic_name, input_ptr); + } + } + + if (!process_success) { + auto new_cloud_collector = std::make_shared( + std::dynamic_pointer_cast(shared_from_this()), + combine_cloud_handler_, params_.input_topics.size(), params_.timeout_sec, params_.debug_mode); + + cloud_collectors_.push_back(new_cloud_collector); + cloud_collectors_lock.unlock(); + + collector_matching_strategy_->set_collector_info(new_cloud_collector, matching_params); + (void)new_cloud_collector->process_pointcloud(topic_name, input_ptr); + } +} + +void PointCloudConcatenateDataSynchronizerComponent::twist_callback( + const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr input) +{ + combine_cloud_handler_->process_twist(input); +} + +void PointCloudConcatenateDataSynchronizerComponent::odom_callback( + const nav_msgs::msg::Odometry::ConstSharedPtr input) +{ +
combine_cloud_handler_->process_odometry(input); +} + +void PointCloudConcatenateDataSynchronizerComponent::publish_clouds( + ConcatenatedCloudResult && concatenated_cloud_result, + std::shared_ptr collector_info) +{ + // should never come to this state. + if (concatenated_cloud_result.concatenate_cloud_ptr == nullptr) { + RCLCPP_ERROR(this->get_logger(), "Concatenated cloud is a nullptr."); + return; + } + + if (concatenated_cloud_result.concatenate_cloud_ptr->data.empty()) { + RCLCPP_ERROR(this->get_logger(), "Concatenated cloud is an empty pointcloud."); + is_concatenated_cloud_empty_ = true; + } + + current_concatenate_cloud_timestamp_ = + rclcpp::Time(concatenated_cloud_result.concatenate_cloud_ptr->header.stamp).seconds(); + + if ( + current_concatenate_cloud_timestamp_ < latest_concatenate_cloud_timestamp_ && + !params_.publish_previous_but_late_pointcloud) { + // Publish the cloud if the rosbag replays in loop + if ( + latest_concatenate_cloud_timestamp_ - current_concatenate_cloud_timestamp_ > + params_.rosbag_length) { + publish_pointcloud_ = true; // Force publishing in this case + } else { + drop_previous_but_late_pointcloud_ = true; // Otherwise, drop the late pointcloud + } + } else { + // Publish pointcloud if timestamps are valid or the condition doesn't apply + publish_pointcloud_ = true; + } + + if (publish_pointcloud_) { + latest_concatenate_cloud_timestamp_ = current_concatenate_cloud_timestamp_; + auto concatenate_pointcloud_output = std::make_unique( + std::move(*concatenated_cloud_result.concatenate_cloud_ptr)); + concatenated_cloud_publisher_->publish(std::move(concatenate_pointcloud_output)); + + // publish transformed raw pointclouds + if ( + params_.publish_synchronized_pointcloud && + concatenated_cloud_result.topic_to_transformed_cloud_map) { + for (const auto & topic : params_.input_topics) { + // Get a reference to the internal map + if ( + (*concatenated_cloud_result.topic_to_transformed_cloud_map).find(topic) != + (*concatenated_cloud_result.topic_to_transformed_cloud_map).end()) { + auto transformed_cloud_output = std::make_unique( + *(*concatenated_cloud_result.topic_to_transformed_cloud_map).at(topic)); + topic_to_transformed_cloud_publisher_map_[topic]->publish( + std::move(transformed_cloud_output)); + } else { + RCLCPP_WARN( + this->get_logger(), + "transformed_raw_points[%s] is nullptr, skipping pointcloud publish.", topic.c_str()); + } + } + } + } + + diagnostic_collector_info_ = collector_info; + + diagnostic_topic_to_original_stamp_map_ = concatenated_cloud_result.topic_to_original_stamp_map; + diagnostic_updater_.force_update(); + + // add processing time for debug + if (debug_publisher_) { + const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); + const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); + debug_publisher_->publish( + "debug/cyclic_time_ms", cyclic_time_ms); + debug_publisher_->publish( + "debug/processing_time_ms", processing_time_ms); + + for (const auto & [topic, stamp] : concatenated_cloud_result.topic_to_original_stamp_map) { + const auto pipeline_latency_ms = (this->get_clock()->now().seconds() - stamp) * 1000; + debug_publisher_->publish( + "debug" + topic + "/pipeline_latency_ms", pipeline_latency_ms); + } + } +} + +void PointCloudConcatenateDataSynchronizerComponent::manage_collector_list() +{ + std::lock_guard cloud_collectors_lock(cloud_collectors_mutex_); + + for (auto it = cloud_collectors_.begin(); it != cloud_collectors_.end();) { + if ((*it)->concatenate_finished()) { + it 
= cloud_collectors_.erase(it); // Erase and move the iterator to the next element + } else { + ++it; // Move to the next element + } + } +} + +std::string PointCloudConcatenateDataSynchronizerComponent::format_timestamp(double timestamp) +{ + std::ostringstream oss; + oss << std::fixed << std::setprecision(9) << timestamp; + return oss.str(); +} + +void PointCloudConcatenateDataSynchronizerComponent::check_concat_status( + diagnostic_updater::DiagnosticStatusWrapper & stat) +{ + if (publish_pointcloud_ || drop_previous_but_late_pointcloud_) { + stat.add( + "concatenated_cloud_timestamp", format_timestamp(current_concatenate_cloud_timestamp_)); + + if ( + auto naive_info = std::dynamic_pointer_cast(diagnostic_collector_info_)) { + stat.add("first_cloud_arrival_timestamp", format_timestamp(naive_info->timestamp)); + } else if ( + auto advanced_info = + std::dynamic_pointer_cast(diagnostic_collector_info_)) { + stat.add( + "reference_timestamp_min", + format_timestamp(advanced_info->timestamp - advanced_info->noise_window)); + stat.add( + "reference_timestamp_max", + format_timestamp(advanced_info->timestamp + advanced_info->noise_window)); + } + + bool topic_miss = false; + + bool concatenation_success = true; + for (const auto & topic : params_.input_topics) { + bool input_cloud_concatenated = true; + if ( + diagnostic_topic_to_original_stamp_map_.find(topic) != + diagnostic_topic_to_original_stamp_map_.end()) { + stat.add( + topic + "/timestamp", format_timestamp(diagnostic_topic_to_original_stamp_map_[topic])); + } else { + topic_miss = true; + concatenation_success = false; + input_cloud_concatenated = false; + } + stat.add(topic + "/is_concatenated", input_cloud_concatenated); + } + + stat.add("cloud_concatenation_success", concatenation_success); + + int8_t level = diagnostic_msgs::msg::DiagnosticStatus::OK; + std::string message = "Concatenated pointcloud is published and include all topics"; + + if (drop_previous_but_late_pointcloud_) { + if (topic_miss) { + level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + message = + "Concatenated pointcloud misses some topics and is not published because it arrived " + "too late"; + } else { + level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + message = "Concatenated pointcloud is not published as it is too late"; + } + } else { + if (is_concatenated_cloud_empty_) { + level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + message = "Concatenated pointcloud is empty"; + } else if (topic_miss) { + level = diagnostic_msgs::msg::DiagnosticStatus::ERROR; + message = "Concatenated pointcloud is published but misses some topics"; + } + } + + stat.summary(level, message); + + publish_pointcloud_ = false; + drop_previous_but_late_pointcloud_ = false; + is_concatenated_cloud_empty_ = false; + } else { + const int8_t level = diagnostic_msgs::msg::DiagnosticStatus::OK; + const std::string message = + "Concatenate node launch successfully, but waiting for input pointcloud"; + stat.summary(level, message); + } +} + +std::list> +PointCloudConcatenateDataSynchronizerComponent::get_cloud_collectors() +{ + return cloud_collectors_; +} + +void PointCloudConcatenateDataSynchronizerComponent::add_cloud_collector( + const std::shared_ptr & collector) +{ + cloud_collectors_.push_back(collector); +} + +} // namespace autoware::pointcloud_preprocessor + +#include +RCLCPP_COMPONENTS_REGISTER_NODE( + autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent) diff --git 
a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp b/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp deleted file mode 100644 index b49e21b8f30be..0000000000000 --- a/sensing/autoware_pointcloud_preprocessor/src/concatenate_data/concatenate_and_time_sync_nodelet.cpp +++ /dev/null @@ -1,718 +0,0 @@ -// Copyright 2020 Tier IV, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* - * Software License Agreement (BSD License) - * - * Copyright (c) 2009, Willow Garage, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Willow Garage, Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * - * $Id: concatenate_data.cpp 35231 2011-01-14 05:33:20Z rusu $ - * - */ - -#include "autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_nodelet.hpp" - -#include "autoware/pointcloud_preprocessor/utility/memory.hpp" - -#include - -#include - -#include -#include -#include -#include -#include -#include - -#define DEFAULT_SYNC_TOPIC_POSTFIX \ - "_synchronized" // default postfix name for synchronized pointcloud - -////////////////////////////////////////////////////////////////////////////////////////////// - -namespace autoware::pointcloud_preprocessor -{ -PointCloudConcatenateDataSynchronizerComponent::PointCloudConcatenateDataSynchronizerComponent( - const rclcpp::NodeOptions & node_options) -: Node("point_cloud_concatenator_component", node_options), - input_twist_topic_type_(declare_parameter("input_twist_topic_type", "twist")) -{ - // initialize debug tool - { - using autoware::universe_utils::DebugPublisher; - using autoware::universe_utils::StopWatch; - stop_watch_ptr_ = std::make_unique>(); - debug_publisher_ = std::make_unique(this, "concatenate_data_synchronizer"); - stop_watch_ptr_->tic("cyclic_time"); - stop_watch_ptr_->tic("processing_time"); - } - - // Set parameters - { - output_frame_ = static_cast(declare_parameter("output_frame", "")); - if (output_frame_.empty()) { - RCLCPP_ERROR(get_logger(), "Need an 'output_frame' parameter to be set before continuing!"); - return; - } - has_static_tf_only_ = declare_parameter("has_static_tf_only", false); - declare_parameter("input_topics", std::vector()); - input_topics_ = get_parameter("input_topics").as_string_array(); - if (input_topics_.empty()) { - RCLCPP_ERROR(get_logger(), "Need a 'input_topics' parameter to be set before continuing!"); - return; - } - if (input_topics_.size() == 1) { - RCLCPP_ERROR(get_logger(), "Only one topic given. 
Need at least two topics to continue."); - return; - } - - // Optional parameters - maximum_queue_size_ = static_cast(declare_parameter("max_queue_size", 5)); - timeout_sec_ = static_cast(declare_parameter("timeout_sec", 0.1)); - - input_offset_ = declare_parameter("input_offset", std::vector{}); - if (!input_offset_.empty() && input_topics_.size() != input_offset_.size()) { - RCLCPP_ERROR(get_logger(), "The number of topics does not match the number of offsets."); - return; - } - - // Check if publish synchronized pointcloud - publish_synchronized_pointcloud_ = declare_parameter("publish_synchronized_pointcloud", true); - keep_input_frame_in_synchronized_pointcloud_ = - declare_parameter("keep_input_frame_in_synchronized_pointcloud", true); - synchronized_pointcloud_postfix_ = - declare_parameter("synchronized_pointcloud_postfix", "pointcloud"); - } - - // Initialize not_subscribed_topic_names_ - { - for (const std::string & e : input_topics_) { - not_subscribed_topic_names_.insert(e); - } - } - - // Initialize offset map - { - for (size_t i = 0; i < input_offset_.size(); ++i) { - offset_map_[input_topics_[i]] = input_offset_[i]; - } - } - - // tf2 listener - { - managed_tf_buffer_ = - std::make_unique(this, has_static_tf_only_); - } - - // Output Publishers - { - rclcpp::PublisherOptions pub_options; - pub_options.qos_overriding_options = rclcpp::QosOverridingOptions::with_default_policies(); - pub_output_ = this->create_publisher( - "output", rclcpp::SensorDataQoS().keep_last(maximum_queue_size_), pub_options); - } - - // Subscribers - { - RCLCPP_DEBUG_STREAM( - get_logger(), "Subscribing to " << input_topics_.size() << " user given topics as inputs:"); - for (const auto & input_topic : input_topics_) { - RCLCPP_DEBUG_STREAM(get_logger(), " - " << input_topic); - } - - // Subscribe to the filters - filters_.resize(input_topics_.size()); - - // First input_topics_.size () filters are valid - for (size_t d = 0; d < input_topics_.size(); ++d) { - cloud_stdmap_.insert(std::make_pair(input_topics_[d], nullptr)); - cloud_stdmap_tmp_ = cloud_stdmap_; - - // CAN'T use auto type here. 
- std::function cb = std::bind( - &PointCloudConcatenateDataSynchronizerComponent::cloud_callback, this, - std::placeholders::_1, input_topics_[d]); - - filters_[d].reset(); - filters_[d] = this->create_subscription( - input_topics_[d], rclcpp::SensorDataQoS().keep_last(maximum_queue_size_), cb); - } - - if (input_twist_topic_type_ == "twist") { - auto twist_cb = std::bind( - &PointCloudConcatenateDataSynchronizerComponent::twist_callback, this, - std::placeholders::_1); - sub_twist_ = this->create_subscription( - "~/input/twist", rclcpp::QoS{100}, twist_cb); - } else if (input_twist_topic_type_ == "odom") { - auto odom_cb = std::bind( - &PointCloudConcatenateDataSynchronizerComponent::odom_callback, this, - std::placeholders::_1); - sub_odom_ = this->create_subscription( - "~/input/odom", rclcpp::QoS{100}, odom_cb); - } else { - RCLCPP_ERROR_STREAM( - get_logger(), "input_twist_topic_type is invalid: " << input_twist_topic_type_); - throw std::runtime_error("input_twist_topic_type is invalid: " + input_twist_topic_type_); - } - } - - // Transformed Raw PointCloud2 Publisher to publish the transformed pointcloud - if (publish_synchronized_pointcloud_) { - rclcpp::PublisherOptions pub_options; - pub_options.qos_overriding_options = rclcpp::QosOverridingOptions::with_default_policies(); - - for (auto & topic : input_topics_) { - std::string new_topic = replaceSyncTopicNamePostfix(topic, synchronized_pointcloud_postfix_); - auto publisher = this->create_publisher( - new_topic, rclcpp::SensorDataQoS().keep_last(maximum_queue_size_), pub_options); - transformed_raw_pc_publisher_map_.insert({topic, publisher}); - } - } - - // Set timer - { - const auto period_ns = std::chrono::duration_cast( - std::chrono::duration(timeout_sec_)); - timer_ = rclcpp::create_timer( - this, get_clock(), period_ns, - std::bind(&PointCloudConcatenateDataSynchronizerComponent::timer_callback, this)); - } - - // Diagnostic Updater - { - updater_.setHardwareID("concatenate_data_checker"); - updater_.add( - "concat_status", this, &PointCloudConcatenateDataSynchronizerComponent::checkConcatStatus); - } -} - -std::string PointCloudConcatenateDataSynchronizerComponent::replaceSyncTopicNamePostfix( - const std::string & original_topic_name, const std::string & postfix) -{ - std::string replaced_topic_name; - // separate the topic name by '/' and replace the last element with the new postfix - size_t pos = original_topic_name.find_last_of("/"); - if (pos == std::string::npos) { - // not found '/': this is not a namespaced topic - RCLCPP_WARN_STREAM( - get_logger(), - "The topic name is not namespaced. The postfix will be added to the end of the topic name."); - return original_topic_name + postfix; - } else { - // replace the last element with the new postfix - replaced_topic_name = original_topic_name.substr(0, pos) + "/" + postfix; - } - - // if topic name is the same with original topic name, add postfix to the end of the topic name - if (replaced_topic_name == original_topic_name) { - RCLCPP_WARN_STREAM( - get_logger(), "The topic name " - << original_topic_name - << " have the same postfix with synchronized pointcloud. 
We use the postfix " - "to the end of the topic name."); - replaced_topic_name = original_topic_name + DEFAULT_SYNC_TOPIC_POSTFIX; - } - return replaced_topic_name; -} - -/** - * @brief compute transform to adjust for old timestamp - * - * @param old_stamp - * @param new_stamp - * @return Eigen::Matrix4f: transformation matrix from new_stamp to old_stamp - */ -Eigen::Matrix4f -PointCloudConcatenateDataSynchronizerComponent::computeTransformToAdjustForOldTimestamp( - const rclcpp::Time & old_stamp, const rclcpp::Time & new_stamp) -{ - // return identity if no twist is available - if (twist_ptr_queue_.empty()) { - RCLCPP_WARN_STREAM_THROTTLE( - get_logger(), *get_clock(), std::chrono::milliseconds(10000).count(), - "No twist is available. Please confirm twist topic and timestamp"); - return Eigen::Matrix4f::Identity(); - } - - // return identity if old_stamp is newer than new_stamp - if (old_stamp > new_stamp) { - RCLCPP_DEBUG_STREAM_THROTTLE( - get_logger(), *get_clock(), std::chrono::milliseconds(10000).count(), - "old_stamp is newer than new_stamp,"); - return Eigen::Matrix4f::Identity(); - } - - auto old_twist_ptr_it = std::lower_bound( - std::begin(twist_ptr_queue_), std::end(twist_ptr_queue_), old_stamp, - [](const geometry_msgs::msg::TwistStamped::ConstSharedPtr & x_ptr, const rclcpp::Time & t) { - return rclcpp::Time(x_ptr->header.stamp) < t; - }); - old_twist_ptr_it = - old_twist_ptr_it == twist_ptr_queue_.end() ? (twist_ptr_queue_.end() - 1) : old_twist_ptr_it; - - auto new_twist_ptr_it = std::lower_bound( - std::begin(twist_ptr_queue_), std::end(twist_ptr_queue_), new_stamp, - [](const geometry_msgs::msg::TwistStamped::ConstSharedPtr & x_ptr, const rclcpp::Time & t) { - return rclcpp::Time(x_ptr->header.stamp) < t; - }); - new_twist_ptr_it = - new_twist_ptr_it == twist_ptr_queue_.end() ? (twist_ptr_queue_.end() - 1) : new_twist_ptr_it; - - auto prev_time = old_stamp; - double x = 0.0; - double y = 0.0; - double yaw = 0.0; - for (auto twist_ptr_it = old_twist_ptr_it; twist_ptr_it != new_twist_ptr_it + 1; ++twist_ptr_it) { - const double dt = - (twist_ptr_it != new_twist_ptr_it) - ? (rclcpp::Time((*twist_ptr_it)->header.stamp) - rclcpp::Time(prev_time)).seconds() - : (rclcpp::Time(new_stamp) - rclcpp::Time(prev_time)).seconds(); - - if (std::fabs(dt) > 0.1) { - RCLCPP_WARN_STREAM_THROTTLE( - get_logger(), *get_clock(), std::chrono::milliseconds(10000).count(), - "Time difference is too large. Cloud not interpolate. Please confirm twist topic and " - "timestamp"); - break; - } - - const double dis = (*twist_ptr_it)->twist.linear.x * dt; - yaw += (*twist_ptr_it)->twist.angular.z * dt; - x += dis * std::cos(yaw); - y += dis * std::sin(yaw); - prev_time = (*twist_ptr_it)->header.stamp; - } - Eigen::AngleAxisf rotation_x(0, Eigen::Vector3f::UnitX()); - Eigen::AngleAxisf rotation_y(0, Eigen::Vector3f::UnitY()); - Eigen::AngleAxisf rotation_z(yaw, Eigen::Vector3f::UnitZ()); - Eigen::Translation3f translation(x, y, 0); - Eigen::Matrix4f rotation_matrix = (translation * rotation_z * rotation_y * rotation_x).matrix(); - return rotation_matrix; -} - -std::map -PointCloudConcatenateDataSynchronizerComponent::combineClouds( - sensor_msgs::msg::PointCloud2::SharedPtr & concat_cloud_ptr) -{ - // map for storing the transformed point clouds - std::map transformed_clouds; - - // Step1. 
gather stamps and sort it - std::vector pc_stamps; - for (const auto & e : cloud_stdmap_) { - transformed_clouds[e.first] = nullptr; - if (e.second != nullptr) { - if (e.second->data.size() == 0) { - continue; - } - pc_stamps.push_back(rclcpp::Time(e.second->header.stamp)); - } - } - if (pc_stamps.empty()) { - return transformed_clouds; - } - // sort stamps and get oldest stamp - std::sort(pc_stamps.begin(), pc_stamps.end()); - std::reverse(pc_stamps.begin(), pc_stamps.end()); - const auto oldest_stamp = pc_stamps.back(); - - // Step2. Calculate compensation transform and concatenate with the oldest stamp - for (const auto & e : cloud_stdmap_) { - if (e.second != nullptr) { - if (e.second->data.size() == 0) { - continue; - } - sensor_msgs::msg::PointCloud2::SharedPtr transformed_cloud_ptr( - new sensor_msgs::msg::PointCloud2()); - managed_tf_buffer_->transformPointcloud(output_frame_, *e.second, *transformed_cloud_ptr); - - // calculate transforms to oldest stamp - Eigen::Matrix4f adjust_to_old_data_transform = Eigen::Matrix4f::Identity(); - rclcpp::Time transformed_stamp = rclcpp::Time(e.second->header.stamp); - for (const auto & stamp : pc_stamps) { - const auto new_to_old_transform = - computeTransformToAdjustForOldTimestamp(stamp, transformed_stamp); - adjust_to_old_data_transform = new_to_old_transform * adjust_to_old_data_transform; - transformed_stamp = std::min(transformed_stamp, stamp); - } - sensor_msgs::msg::PointCloud2::SharedPtr transformed_delay_compensated_cloud_ptr( - new sensor_msgs::msg::PointCloud2()); - pcl_ros::transformPointCloud( - adjust_to_old_data_transform, *transformed_cloud_ptr, - *transformed_delay_compensated_cloud_ptr); - - // concatenate - if (concat_cloud_ptr == nullptr) { - concat_cloud_ptr = - std::make_shared(*transformed_delay_compensated_cloud_ptr); - } else { - pcl::concatenatePointCloud( - *concat_cloud_ptr, *transformed_delay_compensated_cloud_ptr, *concat_cloud_ptr); - } - // convert to original sensor frame if necessary - bool need_transform_to_sensor_frame = (e.second->header.frame_id != output_frame_); - if (keep_input_frame_in_synchronized_pointcloud_ && need_transform_to_sensor_frame) { - sensor_msgs::msg::PointCloud2::SharedPtr transformed_cloud_ptr_in_sensor_frame( - new sensor_msgs::msg::PointCloud2()); - managed_tf_buffer_->transformPointcloud( - e.second->header.frame_id, *transformed_delay_compensated_cloud_ptr, - *transformed_cloud_ptr_in_sensor_frame); - transformed_cloud_ptr_in_sensor_frame->header.stamp = oldest_stamp; - transformed_cloud_ptr_in_sensor_frame->header.frame_id = e.second->header.frame_id; - transformed_clouds[e.first] = transformed_cloud_ptr_in_sensor_frame; - } else { - transformed_delay_compensated_cloud_ptr->header.stamp = oldest_stamp; - transformed_delay_compensated_cloud_ptr->header.frame_id = output_frame_; - transformed_clouds[e.first] = transformed_delay_compensated_cloud_ptr; - } - - } else { - not_subscribed_topic_names_.insert(e.first); - } - } - concat_cloud_ptr->header.stamp = oldest_stamp; - return transformed_clouds; -} - -void PointCloudConcatenateDataSynchronizerComponent::publish() -{ - stop_watch_ptr_->toc("processing_time", true); - sensor_msgs::msg::PointCloud2::SharedPtr concat_cloud_ptr = nullptr; - not_subscribed_topic_names_.clear(); - - const auto & transformed_raw_points = - PointCloudConcatenateDataSynchronizerComponent::combineClouds(concat_cloud_ptr); - - // publish concatenated pointcloud - if (concat_cloud_ptr) { - auto output = std::make_unique(*concat_cloud_ptr); - 
pub_output_->publish(std::move(output)); - } else { - RCLCPP_WARN(this->get_logger(), "concat_cloud_ptr is nullptr, skipping pointcloud publish."); - } - - // publish transformed raw pointclouds - if (publish_synchronized_pointcloud_) { - for (const auto & e : transformed_raw_points) { - if (e.second) { - auto output = std::make_unique(*e.second); - transformed_raw_pc_publisher_map_[e.first]->publish(std::move(output)); - } else { - RCLCPP_WARN( - this->get_logger(), "transformed_raw_points[%s] is nullptr, skipping pointcloud publish.", - e.first.c_str()); - } - } - } - - updater_.force_update(); - - cloud_stdmap_ = cloud_stdmap_tmp_; - std::for_each(std::begin(cloud_stdmap_tmp_), std::end(cloud_stdmap_tmp_), [](auto & e) { - e.second = nullptr; - }); - // add processing time for debug - if (debug_publisher_) { - const double cyclic_time_ms = stop_watch_ptr_->toc("cyclic_time", true); - const double processing_time_ms = stop_watch_ptr_->toc("processing_time", true); - debug_publisher_->publish( - "debug/cyclic_time_ms", cyclic_time_ms); - debug_publisher_->publish( - "debug/processing_time_ms", processing_time_ms); - } - for (const auto & e : cloud_stdmap_) { - if (e.second != nullptr) { - if (debug_publisher_) { - const auto pipeline_latency_ms = - std::chrono::duration( - std::chrono::nanoseconds( - (this->get_clock()->now() - e.second->header.stamp).nanoseconds())) - .count(); - debug_publisher_->publish( - "debug" + e.first + "/pipeline_latency_ms", pipeline_latency_ms); - } - } - } -} - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -void PointCloudConcatenateDataSynchronizerComponent::convertToXYZIRCCloud( - const sensor_msgs::msg::PointCloud2::SharedPtr & input_ptr, - sensor_msgs::msg::PointCloud2::SharedPtr & output_ptr) -{ - output_ptr->header = input_ptr->header; - - PointCloud2Modifier output_modifier{ - *output_ptr, input_ptr->header.frame_id}; - output_modifier.reserve(input_ptr->width); - - bool has_valid_intensity = - std::any_of(input_ptr->fields.begin(), input_ptr->fields.end(), [](const auto & field) { - return field.name == "intensity" && field.datatype == sensor_msgs::msg::PointField::UINT8; - }); - - bool has_valid_return_type = - std::any_of(input_ptr->fields.begin(), input_ptr->fields.end(), [](const auto & field) { - return field.name == "return_type" && field.datatype == sensor_msgs::msg::PointField::UINT8; - }); - - bool has_valid_channel = - std::any_of(input_ptr->fields.begin(), input_ptr->fields.end(), [](const auto & field) { - return field.name == "channel" && field.datatype == sensor_msgs::msg::PointField::UINT16; - }); - - sensor_msgs::PointCloud2Iterator it_x(*input_ptr, "x"); - sensor_msgs::PointCloud2Iterator it_y(*input_ptr, "y"); - sensor_msgs::PointCloud2Iterator it_z(*input_ptr, "z"); - - if (has_valid_intensity && has_valid_return_type && has_valid_channel) { - sensor_msgs::PointCloud2Iterator it_i(*input_ptr, "intensity"); - sensor_msgs::PointCloud2Iterator it_r(*input_ptr, "return_type"); - sensor_msgs::PointCloud2Iterator it_c(*input_ptr, "channel"); - - for (; it_x != it_x.end(); ++it_x, ++it_y, ++it_z, ++it_i, ++it_r, ++it_c) { - PointXYZIRC point; - point.x = *it_x; - point.y = *it_y; - point.z = *it_z; - point.intensity = *it_i; - point.return_type = *it_r; - point.channel = *it_c; - output_modifier.push_back(std::move(point)); - } - } else { - for (; it_x != it_x.end(); ++it_x, ++it_y, ++it_z) { - PointXYZIRC point; - point.x = *it_x; - point.y = *it_y; - point.z = 
*it_z; - output_modifier.push_back(std::move(point)); - } - } -} - -void PointCloudConcatenateDataSynchronizerComponent::setPeriod(const int64_t new_period) -{ - if (!timer_) { - return; - } - int64_t old_period = 0; - rcl_ret_t ret = rcl_timer_get_period(timer_->get_timer_handle().get(), &old_period); - if (ret != RCL_RET_OK) { - rclcpp::exceptions::throw_from_rcl_error(ret, "Couldn't get old period"); - } - ret = rcl_timer_exchange_period(timer_->get_timer_handle().get(), new_period, &old_period); - if (ret != RCL_RET_OK) { - rclcpp::exceptions::throw_from_rcl_error(ret, "Couldn't exchange_period"); - } -} - -void PointCloudConcatenateDataSynchronizerComponent::cloud_callback( - const sensor_msgs::msg::PointCloud2::ConstSharedPtr & input_ptr, const std::string & topic_name) -{ - if (!utils::is_data_layout_compatible_with_point_xyzirc(*input_ptr)) { - RCLCPP_ERROR( - get_logger(), "The pointcloud layout is not compatible with PointXYZIRC. Aborting"); - - if (utils::is_data_layout_compatible_with_point_xyzi(*input_ptr)) { - RCLCPP_ERROR( - get_logger(), - "The pointcloud layout is compatible with PointXYZI. You may be using legacy code/data"); - } - - return; - } - - std::lock_guard lock(mutex_); - sensor_msgs::msg::PointCloud2::SharedPtr xyzirc_input_ptr(new sensor_msgs::msg::PointCloud2()); - auto input = std::make_shared(*input_ptr); - if (input->data.empty()) { - RCLCPP_WARN_STREAM_THROTTLE( - this->get_logger(), *this->get_clock(), 1000, "Empty sensor points!"); - } else { - // convert to XYZIRC pointcloud if pointcloud is not empty - convertToXYZIRCCloud(input, xyzirc_input_ptr); - } - - const bool is_already_subscribed_this = (cloud_stdmap_[topic_name] != nullptr); - const bool is_already_subscribed_tmp = std::any_of( - std::begin(cloud_stdmap_tmp_), std::end(cloud_stdmap_tmp_), - [](const auto & e) { return e.second != nullptr; }); - - if (is_already_subscribed_this) { - cloud_stdmap_tmp_[topic_name] = xyzirc_input_ptr; - - if (!is_already_subscribed_tmp) { - auto period = std::chrono::duration_cast( - std::chrono::duration(timeout_sec_)); - try { - setPeriod(period.count()); - } catch (rclcpp::exceptions::RCLError & ex) { - RCLCPP_WARN_THROTTLE(get_logger(), *get_clock(), 5000, "%s", ex.what()); - } - timer_->reset(); - } - } else { - cloud_stdmap_[topic_name] = xyzirc_input_ptr; - - const bool is_subscribed_all = std::all_of( - std::begin(cloud_stdmap_), std::end(cloud_stdmap_), - [](const auto & e) { return e.second != nullptr; }); - - if (is_subscribed_all) { - for (const auto & e : cloud_stdmap_tmp_) { - if (e.second != nullptr) { - cloud_stdmap_[e.first] = e.second; - } - } - std::for_each(std::begin(cloud_stdmap_tmp_), std::end(cloud_stdmap_tmp_), [](auto & e) { - e.second = nullptr; - }); - - timer_->cancel(); - publish(); - } else if (offset_map_.size() > 0) { - timer_->cancel(); - auto period = std::chrono::duration_cast( - std::chrono::duration(timeout_sec_ - offset_map_[topic_name])); - try { - setPeriod(period.count()); - } catch (rclcpp::exceptions::RCLError & ex) { - RCLCPP_WARN_THROTTLE(get_logger(), *get_clock(), 5000, "%s", ex.what()); - } - timer_->reset(); - } - } -} - -void PointCloudConcatenateDataSynchronizerComponent::timer_callback() -{ - using std::chrono_literals::operator""ms; - timer_->cancel(); - if (mutex_.try_lock()) { - publish(); - mutex_.unlock(); - } else { - try { - std::chrono::nanoseconds period = 10ms; - setPeriod(period.count()); - } catch (rclcpp::exceptions::RCLError & ex) { - RCLCPP_WARN_THROTTLE(get_logger(), *get_clock(), 5000, "%s", 
ex.what()); - } - timer_->reset(); - } -} - -void PointCloudConcatenateDataSynchronizerComponent::twist_callback( - const geometry_msgs::msg::TwistWithCovarianceStamped::ConstSharedPtr input) -{ - // if rosbag restart, clear buffer - if (!twist_ptr_queue_.empty()) { - if (rclcpp::Time(twist_ptr_queue_.front()->header.stamp) > rclcpp::Time(input->header.stamp)) { - twist_ptr_queue_.clear(); - } - } - - // pop old data - while (!twist_ptr_queue_.empty()) { - if ( - rclcpp::Time(twist_ptr_queue_.front()->header.stamp) + rclcpp::Duration::from_seconds(1.0) > - rclcpp::Time(input->header.stamp)) { - break; - } - twist_ptr_queue_.pop_front(); - } - - auto twist_ptr = std::make_shared(); - twist_ptr->header = input->header; - twist_ptr->twist = input->twist.twist; - twist_ptr_queue_.push_back(twist_ptr); -} - -void PointCloudConcatenateDataSynchronizerComponent::odom_callback( - const nav_msgs::msg::Odometry::ConstSharedPtr input) -{ - // if rosbag restart, clear buffer - if (!twist_ptr_queue_.empty()) { - if (rclcpp::Time(twist_ptr_queue_.front()->header.stamp) > rclcpp::Time(input->header.stamp)) { - twist_ptr_queue_.clear(); - } - } - - // pop old data - while (!twist_ptr_queue_.empty()) { - if ( - rclcpp::Time(twist_ptr_queue_.front()->header.stamp) + rclcpp::Duration::from_seconds(1.0) > - rclcpp::Time(input->header.stamp)) { - break; - } - twist_ptr_queue_.pop_front(); - } - - auto twist_ptr = std::make_shared(); - twist_ptr->header = input->header; - twist_ptr->twist = input->twist.twist; - twist_ptr_queue_.push_back(twist_ptr); -} - -void PointCloudConcatenateDataSynchronizerComponent::checkConcatStatus( - diagnostic_updater::DiagnosticStatusWrapper & stat) -{ - for (const std::string & e : input_topics_) { - const std::string subscribe_status = not_subscribed_topic_names_.count(e) ? "NG" : "OK"; - stat.add(e, subscribe_status); - } - - const int8_t level = not_subscribed_topic_names_.empty() - ? diagnostic_msgs::msg::DiagnosticStatus::OK - : diagnostic_msgs::msg::DiagnosticStatus::WARN; - const std::string message = not_subscribed_topic_names_.empty() - ? "Concatenate all topics" - : "Some topics are not concatenated"; - stat.summary(level, message); -} -} // namespace autoware::pointcloud_preprocessor - -#include -RCLCPP_COMPONENTS_REGISTER_NODE( - autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent) diff --git a/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_component.py b/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_component.py new file mode 100644 index 0000000000000..3d15eecf0101c --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_component.py @@ -0,0 +1,909 @@ +#!/usr/bin/env python3 + +# Copyright 2024 TIER IV, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
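+# Component tests for the concatenate_and_time_sync_node. The node is launched
+# with the "advanced" matching strategy and exercised with normal inputs, noisy
+# timestamps, empty point clouds, dropped point clouds, and delayed point
+# clouds; the tests check the number of published concatenated clouds and,
+# where applicable, the concatenated point coordinates.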
+ +import random +import struct +import time +from typing import List +from typing import Tuple +import unittest + +from geometry_msgs.msg import TransformStamped +from geometry_msgs.msg import TwistWithCovarianceStamped +import launch +import launch.actions +from launch_ros.actions import ComposableNodeContainer +from launch_ros.descriptions import ComposableNode +import launch_testing +import numpy as np +import pytest +import rclpy +from rclpy.qos import QoSDurabilityPolicy +from rclpy.qos import QoSHistoryPolicy +from rclpy.qos import QoSProfile +from rclpy.qos import QoSReliabilityPolicy +from rclpy.time import Time +from sensor_msgs.msg import PointCloud2 +from sensor_msgs.msg import PointField +from sensor_msgs_py import point_cloud2 +from std_msgs.msg import Header +from tf2_ros.static_transform_broadcaster import StaticTransformBroadcaster + +INPUT_LIDAR_TOPICS = [ + "/test/sensing/lidar/left/pointcloud", + "/test/sensing/lidar/right/pointcloud", + "/test/sensing/lidar/top/pointcloud", +] +FRAME_ID_LISTS = [ + "left_lidar", + "right_lidar", + "top_lidar", +] + +TIMEOUT_SEC = 0.2 +TIMESTAMP_OFFSET = [0.0, 0.04, 0.08] +TIMESTAMP_NOISE = 0.01 # 10 ms + +NUM_OF_POINTS = 3 +MILLISECONDS = 1000000 + + +STANDARD_TOLERANCE = 1e-4 +COARSE_TOLERANCE = TIMESTAMP_NOISE * 2 + +GLOBAL_SECONDS = 10 +GLOBAL_NANOSECONDS = 100000000 + +# Set to True if you want to check the output of the component tests. +DEBUG = False + + +@pytest.mark.launch_test +def generate_test_description(): + nodes = [] + + nodes.append( + ComposableNode( + package="autoware_pointcloud_preprocessor", + plugin="autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent", + name="test_concatenate_node", + remappings=[ + ("~/input/twist", "/test/sensing/vehicle_velocity_converter/twist_with_covariance"), + ("output", "/test/sensing/lidar/concatenated/pointcloud"), + ], + parameters=[ + { + "debug_mode": False, + "has_static_tf_only": False, + "rosbag_length": 0.0, + "maximum_queue_size": 5, + "timeout_sec": TIMEOUT_SEC, + "is_motion_compensated": True, + "publish_synchronized_pointcloud": True, + "keep_input_frame_in_synchronized_pointcloud": True, + "publish_previous_but_late_pointcloud": True, + "synchronized_pointcloud_postfix": "pointcloud", + "input_twist_topic_type": "twist", + "input_topics": INPUT_LIDAR_TOPICS, + "output_frame": "base_link", + "matching_strategy.type": "advanced", + "matching_strategy.lidar_timestamp_offsets": TIMESTAMP_OFFSET, + "matching_strategy.lidar_timestamp_noise_window": [ + TIMESTAMP_NOISE, + TIMESTAMP_NOISE, + TIMESTAMP_NOISE, + ], + } + ], + extra_arguments=[{"use_intra_process_comms": True}], + ) + ) + + container = ComposableNodeContainer( + name="test_concatenate_data_container", + namespace="pointcloud_preprocessor", + package="rclcpp_components", + executable="component_container", + composable_node_descriptions=nodes, + output="screen", + ) + + return launch.LaunchDescription( + [ + container, + # Start tests right away - no need to wait for anything + launch_testing.actions.ReadyToTest(), + ] + ) + + +def create_header(timestamp: Time, frame_id_index: int, is_base_link: bool) -> Header: + header = Header() + header.stamp = timestamp + + if is_base_link: + header.frame_id = "base_link" + else: + header.frame_id = FRAME_ID_LISTS[frame_id_index] + return header + + +def create_points() -> List[Tuple[float, float, float]]: + return [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)] + + +def create_fields() -> ( + Tuple[List[int], List[int], List[int], List[float], 
List[float], List[float], List[int]] +): + # The values of the fields do not influence the results. + intensities = [255] * NUM_OF_POINTS + return_types = [1] * NUM_OF_POINTS + channels = [1] * NUM_OF_POINTS + azimuths = [0.0] * NUM_OF_POINTS + elevations = [0.0] * NUM_OF_POINTS + distances = [1.0] * NUM_OF_POINTS + timestamps = [0] * NUM_OF_POINTS + return intensities, return_types, channels, azimuths, elevations, distances, timestamps + + +def get_pointcloud_msg( + timestamp: Time, is_generate_points: bool, frame_id_index: int, is_base_link: bool +) -> PointCloud2: + header = create_header(timestamp, frame_id_index, is_base_link) + points = create_points() + intensities, return_types, channels, azimuths, elevations, distances, timestamps = ( + create_fields() + ) + + pointcloud_data = bytearray() + + if is_generate_points: + for i in range(NUM_OF_POINTS): + pointcloud_data += struct.pack("fff", points[i][0], points[i][1], points[i][2]) + pointcloud_data += struct.pack("B", intensities[i]) + pointcloud_data += struct.pack("B", return_types[i]) + pointcloud_data += struct.pack("H", channels[i]) + pointcloud_data += struct.pack("f", azimuths[i]) + pointcloud_data += struct.pack("f", elevations[i]) + pointcloud_data += struct.pack("f", distances[i]) + pointcloud_data += struct.pack("I", timestamps[i]) + + fields = [ + PointField(name="x", offset=0, datatype=PointField.FLOAT32, count=1), + PointField(name="y", offset=4, datatype=PointField.FLOAT32, count=1), + PointField(name="z", offset=8, datatype=PointField.FLOAT32, count=1), + PointField(name="intensity", offset=12, datatype=PointField.UINT8, count=1), + PointField(name="return_type", offset=13, datatype=PointField.UINT8, count=1), + PointField(name="channel", offset=14, datatype=PointField.UINT16, count=1), + PointField(name="azimuth", offset=16, datatype=PointField.FLOAT32, count=1), + PointField(name="elevation", offset=20, datatype=PointField.FLOAT32, count=1), + PointField(name="distance", offset=24, datatype=PointField.FLOAT32, count=1), + PointField(name="time_stamp", offset=28, datatype=PointField.UINT32, count=1), + ] + + pointcloud_msg = PointCloud2( + header=header, + height=1, + width=NUM_OF_POINTS, + is_dense=True, + is_bigendian=False, + point_step=32, # 3*4 + 1 + 1 + 2 + 4 + 4 + 4 + 4 = 32 bytes per point + row_step=32 * NUM_OF_POINTS, + fields=fields, + data=pointcloud_data, + ) + + return pointcloud_msg + + +def generate_transform_msg( + parent_frame: str, + child_frame: str, + x: float, + y: float, + z: float, + qx: float, + qy: float, + qz: float, + qw: float, +) -> TransformStamped: + tf_msg = TransformStamped() + tf_msg.header.stamp = Time(seconds=GLOBAL_SECONDS, nanoseconds=GLOBAL_NANOSECONDS).to_msg() + tf_msg.header.frame_id = parent_frame + tf_msg.child_frame_id = child_frame + tf_msg.transform.translation.x = x + tf_msg.transform.translation.y = y + tf_msg.transform.translation.z = z + tf_msg.transform.rotation.x = qx + tf_msg.transform.rotation.y = qy + tf_msg.transform.rotation.z = qz + tf_msg.transform.rotation.w = qw + return tf_msg + + +def generate_static_transform_msgs() -> List[TransformStamped]: + tf_top_lidar_msg = generate_transform_msg( + parent_frame="base_link", + child_frame=FRAME_ID_LISTS[0], + x=0.0, + y=0.0, + z=5.0, + qx=0.0, + qy=0.0, + qz=0.0, + qw=1.0, + ) + + tf_right_lidar_msg = generate_transform_msg( + parent_frame="base_link", + child_frame=FRAME_ID_LISTS[1], + x=0.0, + y=5.0, + z=5.0, + qx=0.0, + qy=0.0, + qz=0.0, + qw=1.0, + ) + + tf_left_lidar_msg = generate_transform_msg( + 
parent_frame="base_link", + child_frame=FRAME_ID_LISTS[2], + x=0.0, + y=-5.0, + z=5.0, + qx=0.0, + qy=0.0, + qz=0.0, + qw=1.0, + ) + + return [tf_top_lidar_msg, tf_right_lidar_msg, tf_left_lidar_msg] + + +def generate_twist_msg() -> TwistWithCovarianceStamped: + twist_header = Header() + twist_header.stamp = Time(seconds=GLOBAL_SECONDS, nanoseconds=GLOBAL_NANOSECONDS).to_msg() + twist_header.frame_id = "base_link" + twist_msg = TwistWithCovarianceStamped() + twist_msg.header = twist_header + twist_msg.twist.twist.linear.x = 1.0 + + return twist_msg + + +def get_output_points(cloud_msg) -> np.ndarray: + points_list = [] + for point in point_cloud2.read_points(cloud_msg, field_names=("x", "y", "z")): + points_list.append([point[0], point[1], point[2]]) + points = np.array(points_list, dtype=np.float32) + return points + + +class TestConcatenateNode(unittest.TestCase): + @classmethod + def setUpClass(cls): + # Init ROS at once + rclpy.init() + + @classmethod + def tearDownClass(cls): + # Shutdown ROS at once + rclpy.shutdown() + + def setUp(self): + self.node = rclpy.create_node("test_concat_node") + tf_msg = generate_static_transform_msgs() + self.tf_broadcaster = StaticTransformBroadcaster(self.node) + self.tf_broadcaster.sendTransform(tf_msg) + self.msg_buffer = [] + self.twist_publisher, self.pointcloud_publishers = self.create_pub_sub() + + def tearDown(self): + self.node.destroy_node() + + def callback(self, msg: PointCloud2): + self.msg_buffer.append(msg) + + def create_pub_sub(self): + # QoS profile for sensor data + sensor_qos = QoSProfile( + history=QoSHistoryPolicy.KEEP_LAST, + depth=10, + reliability=QoSReliabilityPolicy.BEST_EFFORT, + durability=QoSDurabilityPolicy.VOLATILE, + ) + # Publishers + twist_publisher = self.node.create_publisher( + TwistWithCovarianceStamped, + "/test/sensing/vehicle_velocity_converter/twist_with_covariance", + 10, + ) + + pointcloud_publishers = {} + for idx, input_lidar_topic in enumerate(INPUT_LIDAR_TOPICS): + pointcloud_publishers[idx] = self.node.create_publisher( + PointCloud2, + input_lidar_topic, + qos_profile=sensor_qos, + ) + + # create subscriber + self.msg_buffer = [] + self.node.create_subscription( + PointCloud2, + "/test/sensing/lidar/concatenated/pointcloud", + self.callback, + qos_profile=sensor_qos, + ) + + return twist_publisher, pointcloud_publishers + + def test_1_normal_inputs(self): + """Test the normal situation when no pointcloud is delayed or dropped. + + This can test that + 1. Concatenate works fine when all pointclouds are arrived in time. + 2. The motion compensation of concatenation works well. 
+ """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.01) + + rclpy.spin_once(self.node, timeout_sec=0.1) + + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud = np.array( + [ + [1.08, -5, 5], + [0.08, -4, 5], + [0.08, -5, 6], + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud = get_output_points(self.msg_buffer[0]) + + if DEBUG: + print("concatenate_cloud: ", concatenate_cloud) + + self.assertTrue( + np.allclose(concatenate_cloud, expected_pointcloud, atol=1e-3), + "The concatenation node have weird output", + ) + + self.assertEqual( + self.msg_buffer[0].header.frame_id, + "base_link", + "The concatenate pointcloud frame id is not base_link", + ) + + GLOBAL_SECONDS += 1 + + def test_2_normal_inputs_with_noise(self): + """Test the normal situation when no pointcloud is delayed or dropped. Additionally, the pointcloud's timestamp is not ideal which has some noise. + + This can test that + 1. Concatenate works fine when pointclouds' timestamp has noise. + """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS): + noise = random.uniform(-10, 10) * MILLISECONDS + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = ( + GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 + noise + ) # add 40 ms and noise (-10 to 10 ms) + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.01) + + rclpy.spin_once(self.node, timeout_sec=0.1) + + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud = np.array( + [ + [1.08, -5, 5], + [0.08, -4, 5], + [0.08, -5, 6], + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud: ", concatenate_cloud) + + self.assertTrue( + np.allclose(concatenate_cloud, expected_pointcloud, atol=2e-2), + "The concatenation node have weird output", + ) + + def test_3_abnormal_null_pointcloud(self): + """Test the abnormal situation when a pointcloud is empty. + + This can test that + 1. The concatenate node ignore empty pointcloud and concatenate the remain pointcloud. 
+ """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + + if frame_idx == len(INPUT_LIDAR_TOPICS) - 1: + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=False, + frame_id_index=len(INPUT_LIDAR_TOPICS) - 1, + is_base_link=False, + ) + else: + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.01) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud = np.array( + [ + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud: ", concatenate_cloud) + + self.assertTrue( + np.allclose(concatenate_cloud, expected_pointcloud, atol=1e-3), + "The concatenation node have weird output", + ) + + GLOBAL_SECONDS += 1 + + def test_4_abnormal_null_pointcloud_and_drop(self): + """Test the abnormal situation when a pointcloud is empty and other pointclouds are dropped. + + This can test that + 1. The concatenate node publish an empty pointcloud. + """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=False, + frame_id_index=0, + is_base_link=False, + ) + + self.pointcloud_publishers[0].publish(pointcloud_msg) + time.sleep(0.01) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + GLOBAL_SECONDS += 1 + + def test_5_abnormal_multiple_pointcloud_drop(self): + """Test the abnormal situation when multiple pointclouds were dropped (only one pointcloud arrive). + + This can test that + 1. The concatenate node concatenates the single pointcloud after the timeout. 
+ """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=0, + is_base_link=False, + ) + + self.pointcloud_publishers[0].publish(pointcloud_msg) + time.sleep(0.01) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud = np.array( + [ + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud: ", concatenate_cloud) + + self.assertTrue( + np.allclose(concatenate_cloud, expected_pointcloud, atol=1e-3), + "The concatenation node have weird output", + ) + + def test_6_abnormal_single_pointcloud_drop(self): + """Test the abnormal situation when a pointcloud was dropped. + + This can test that + 1. The concatenate node concatenate the remain pointcloud after the timeout. + """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS[:-1]): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.02) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + # Should receive only one concatenate pointcloud + self.assertEqual( + len(self.msg_buffer), + 1, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud = np.array( + [ + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud: ", concatenate_cloud) + + self.assertTrue( + np.allclose(concatenate_cloud, expected_pointcloud, atol=1e-3), + "The concatenation node have weird output", + ) + + GLOBAL_SECONDS += 1 + + def test_7_abnormal_pointcloud_delay(self): + """Test the abnormal situation when a pointcloud was delayed after the timeout. + + This can test that + 1. The concatenate node concatenate the remain pointcloud after the timeout. + 2. The concatenate node will publish the delayed pointcloud after the timeout. 
+ """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS[:-1]): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.02) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = ( + GLOBAL_NANOSECONDS + (len(INPUT_LIDAR_TOPICS) - 1) * MILLISECONDS * 40 + ) # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=len(INPUT_LIDAR_TOPICS) - 1, + is_base_link=False, + ) + + self.pointcloud_publishers[len(INPUT_LIDAR_TOPICS) - 1].publish(pointcloud_msg) + + time.sleep(TIMEOUT_SEC) # timeout threshold + rclpy.spin_once(self.node, timeout_sec=0.1) + + # Should receive only one concatenate pointcloud + self.assertEqual( + len(self.msg_buffer), + 2, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud1 = np.array( + [ + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud1 = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud 1: ", concatenate_cloud1) + + self.assertTrue( + np.allclose(concatenate_cloud1, expected_pointcloud1, atol=1e-3), + "The concatenation node have weird output", + ) + + expected_pointcloud2 = np.array( + [ + [1, -5, 5], + [0, -4, 5], + [0, -5, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud2 = get_output_points(self.msg_buffer[1]) + if DEBUG: + print("concatenate_cloud 2: ", concatenate_cloud2) + + self.assertTrue( + np.allclose(concatenate_cloud2, expected_pointcloud2, atol=1e-3), + "The concatenation node have weird output", + ) + + GLOBAL_SECONDS += 1 + + def test_8_abnormal_pointcloud_drop_continue_normal(self): + """Test the abnormal situation when a pointcloud was dropped. Afterward, next iteration of pointclouds comes normally. + + This can test that + 1. The concatenate node concatenate the remain pointcloud after the timeout. + 2. The concatenate node concatenate next iteration pointclouds when all of the pointcloud arrived. 
+ """ + time.sleep(1) + global GLOBAL_SECONDS + + twist_msg = generate_twist_msg() + self.twist_publisher.publish(twist_msg) + + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS[:-1]): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = GLOBAL_NANOSECONDS + frame_idx * MILLISECONDS * 40 # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.01) + + time.sleep(TIMEOUT_SEC) + rclpy.spin_once(self.node) + + next_global_nanosecond = GLOBAL_NANOSECONDS + 100 * MILLISECONDS + for frame_idx, _ in enumerate(INPUT_LIDAR_TOPICS): + pointcloud_seconds = GLOBAL_SECONDS + pointcloud_nanoseconds = ( + next_global_nanosecond + frame_idx * MILLISECONDS * 40 + ) # add 40 ms + pointcloud_timestamp = Time( + seconds=pointcloud_seconds, nanoseconds=pointcloud_nanoseconds + ).to_msg() + pointcloud_msg = get_pointcloud_msg( + timestamp=pointcloud_timestamp, + is_generate_points=True, + frame_id_index=frame_idx, + is_base_link=False, + ) + self.pointcloud_publishers[frame_idx].publish(pointcloud_msg) + time.sleep(0.01) + + rclpy.spin_once(self.node) + + self.assertEqual( + len(self.msg_buffer), + 2, + "The number of concatenate pointcloud has different number as expected.", + ) + + expected_pointcloud1 = np.array( + [ + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud1 = get_output_points(self.msg_buffer[0]) + if DEBUG: + print("concatenate_cloud 1: ", concatenate_cloud1) + + self.assertTrue( + np.allclose(concatenate_cloud1, expected_pointcloud1, atol=1e-3), + "The concatenation node have weird output", + ) + + expected_pointcloud2 = np.array( + [ + [1.08, -5, 5], + [0.08, -4, 5], + [0.08, -5, 6], + [1.04, 5, 5], + [0.04, 6, 5], + [0.04, 5, 6], + [1, 0, 5], + [0, 1, 5], + [0, 0, 6], + ], + dtype=np.float32, + ) + + concatenate_cloud2 = get_output_points(self.msg_buffer[1]) + if DEBUG: + print("concatenate_cloud 2: ", concatenate_cloud2) + + self.assertTrue( + np.allclose(concatenate_cloud2, expected_pointcloud2, atol=1e-3), + "The concatenation node have weird output", + ) + + GLOBAL_SECONDS += 1 diff --git a/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_unit.cpp b/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_unit.cpp new file mode 100644 index 0000000000000..bc3c7e9538c84 --- /dev/null +++ b/sensing/autoware_pointcloud_preprocessor/test/test_concatenate_node_unit.cpp @@ -0,0 +1,539 @@ +// Copyright 2024 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Note: To regenerate the ground truth (GT) for the expected undistorted point cloud values, +// set the "debug_" value to true to display the point cloud values. 
Then, +// replace the expected values with the newly displayed undistorted point cloud values. + +#include "autoware/pointcloud_preprocessor/concatenate_data/cloud_collector.hpp" +#include "autoware/pointcloud_preprocessor/concatenate_data/combine_cloud_handler.hpp" +#include "autoware/pointcloud_preprocessor/concatenate_data/concatenate_and_time_sync_node.hpp" + +#include + +#include +#include + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +class ConcatenateCloudTest : public ::testing::Test +{ +protected: + void SetUp() override + { + rclcpp::NodeOptions node_options; + // Instead of "input_topics", other parameters are not used. + // They just helps to setup the concatenate node + node_options.parameter_overrides( + {{"debug_mode", false}, + {"has_static_tf_only", false}, + {"rosbag_length", 0.0}, + {"maximum_queue_size", 5}, + {"timeout_sec", 0.2}, + {"is_motion_compensated", true}, + {"publish_synchronized_pointcloud", true}, + {"keep_input_frame_in_synchronized_pointcloud", true}, + {"publish_previous_but_late_pointcloud", false}, + {"synchronized_pointcloud_postfix", "pointcloud"}, + {"input_twist_topic_type", "twist"}, + {"input_topics", std::vector{"lidar_top", "lidar_left", "lidar_right"}}, + {"output_frame", "base_link"}, + {"matching_strategy.type", "advanced"}, + {"matching_strategy.lidar_timestamp_offsets", std::vector{0.0, 0.04, 0.08}}, + {"matching_strategy.lidar_timestamp_noise_window", std::vector{0.01, 0.01, 0.01}}}); + + concatenate_node_ = std::make_shared< + autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent>( + node_options); + combine_cloud_handler_ = + std::make_shared( + *concatenate_node_, "base_link", true, true, true, false); + + collector_ = std::make_shared( + std::dynamic_pointer_cast< + autoware::pointcloud_preprocessor::PointCloudConcatenateDataSynchronizerComponent>( + concatenate_node_->shared_from_this()), + combine_cloud_handler_, number_of_pointcloud, timeout_sec, collector_debug_mode); + + // Setup TF + tf_broadcaster_ = std::make_shared(concatenate_node_); + tf_broadcaster_->sendTransform(generate_static_transform_msgs()); + + // Spin the node for a while to ensure transforms are published + auto start = std::chrono::steady_clock::now(); + auto timeout = std::chrono::milliseconds(100); + while (std::chrono::steady_clock::now() - start < timeout) { + rclcpp::spin_some(concatenate_node_); + std::this_thread::sleep_for(std::chrono::milliseconds(10)); + } + } + + static geometry_msgs::msg::TransformStamped generate_transform_msg( + const std::string & parent_frame, const std::string & child_frame, double x, double y, double z, + double qx, double qy, double qz, double qw) + { + rclcpp::Time timestamp(timestamp_seconds, timestamp_nanoseconds, RCL_ROS_TIME); + geometry_msgs::msg::TransformStamped tf_msg; + tf_msg.header.stamp = timestamp; + tf_msg.header.frame_id = parent_frame; + tf_msg.child_frame_id = child_frame; + tf_msg.transform.translation.x = x; + tf_msg.transform.translation.y = y; + tf_msg.transform.translation.z = z; + tf_msg.transform.rotation.x = qx; + tf_msg.transform.rotation.y = qy; + tf_msg.transform.rotation.z = qz; + tf_msg.transform.rotation.w = qw; + return tf_msg; + } + + static sensor_msgs::msg::PointCloud2 generate_pointcloud_msg( + bool generate_points, bool is_lidar_frame, const std::string & topic_name, + const rclcpp::Time & stamp) + { + sensor_msgs::msg::PointCloud2 pointcloud_msg; + pointcloud_msg.header.stamp = stamp; + 
pointcloud_msg.header.frame_id = is_lidar_frame ? topic_name : "base_link"; + pointcloud_msg.height = 1; + pointcloud_msg.is_dense = true; + pointcloud_msg.is_bigendian = false; + + if (generate_points) { + std::array points = {{ + Eigen::Vector3f(10.0f, 0.0f, 0.0f), // point 1 + Eigen::Vector3f(0.0f, 10.0f, 0.0f), // point 2 + Eigen::Vector3f(0.0f, 0.0f, 10.0f), // point 3 + }}; + + sensor_msgs::PointCloud2Modifier modifier(pointcloud_msg); + modifier.setPointCloud2Fields( + 10, "x", 1, sensor_msgs::msg::PointField::FLOAT32, "y", 1, + sensor_msgs::msg::PointField::FLOAT32, "z", 1, sensor_msgs::msg::PointField::FLOAT32, + "intensity", 1, sensor_msgs::msg::PointField::UINT8, "return_type", 1, + sensor_msgs::msg::PointField::UINT8, "channel", 1, sensor_msgs::msg::PointField::UINT16, + "azimuth", 1, sensor_msgs::msg::PointField::FLOAT32, "elevation", 1, + sensor_msgs::msg::PointField::FLOAT32, "distance", 1, sensor_msgs::msg::PointField::FLOAT32, + "time_stamp", 1, sensor_msgs::msg::PointField::UINT32); + + modifier.resize(number_of_points); + + sensor_msgs::PointCloud2Iterator iter_x(pointcloud_msg, "x"); + sensor_msgs::PointCloud2Iterator iter_y(pointcloud_msg, "y"); + sensor_msgs::PointCloud2Iterator iter_z(pointcloud_msg, "z"); + sensor_msgs::PointCloud2Iterator iter_t(pointcloud_msg, "time_stamp"); + + for (size_t i = 0; i < number_of_points; ++i) { + *iter_x = points[i].x(); + *iter_y = points[i].y(); + *iter_z = points[i].z(); + *iter_t = 0; + ++iter_x; + ++iter_y; + ++iter_z; + ++iter_t; + } + } else { + pointcloud_msg.width = 0; + pointcloud_msg.row_step = 0; + } + + return pointcloud_msg; + } + + static std::vector generate_static_transform_msgs() + { + // generate defined transformations + return { + generate_transform_msg("base_link", "lidar_top", 5.0, 5.0, 5.0, 0.683, 0.5, 0.183, 0.499), + generate_transform_msg("base_link", "lidar_left", 1.0, 1.0, 3.0, 0.278, 0.717, 0.441, 0.453)}; + generate_transform_msg("base_link", "lidar_right", 1.0, 1.0, 3.0, 0.278, 0.717, 0.441, 0.453); + } + + std::shared_ptr + concatenate_node_; + std::shared_ptr combine_cloud_handler_; + std::shared_ptr collector_; + std::shared_ptr tf_broadcaster_; + + static constexpr int32_t timestamp_seconds{10}; + static constexpr uint32_t timestamp_nanoseconds{100'000'000}; + static constexpr size_t number_of_points{3}; + static constexpr float standard_tolerance{1e-4}; + static constexpr int number_of_pointcloud{3}; + static constexpr float timeout_sec{0.2}; + static constexpr bool collector_debug_mode{false}; // For showing collector information + bool debug_{false}; // For the Unit test +}; + +//////////////////////////////// Test combine_cloud_handler //////////////////////////////// +TEST_F(ConcatenateCloudTest, TestProcessTwist) +{ + auto twist_msg = std::make_shared(); + twist_msg->header.stamp = rclcpp::Time(10, 0); + twist_msg->twist.twist.linear.x = 1.0; + twist_msg->twist.twist.angular.z = 0.1; + + combine_cloud_handler_->process_twist(twist_msg); + + ASSERT_FALSE(combine_cloud_handler_->get_twist_queue().empty()); + EXPECT_EQ(combine_cloud_handler_->get_twist_queue().front().twist.linear.x, 1.0); + EXPECT_EQ(combine_cloud_handler_->get_twist_queue().front().twist.angular.z, 0.1); +} + +TEST_F(ConcatenateCloudTest, TestProcessOdometry) +{ + auto odom_msg = std::make_shared(); + odom_msg->header.stamp = rclcpp::Time(10, 0); + odom_msg->twist.twist.linear.x = 1.0; + odom_msg->twist.twist.angular.z = 0.1; + + combine_cloud_handler_->process_odometry(odom_msg); + + 
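+  // The odometry message is stored as a twist entry, so the handler's twist
+  // queue should be non-empty and hold the same linear and angular velocities.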
ASSERT_FALSE(combine_cloud_handler_->get_twist_queue().empty()); + EXPECT_EQ(combine_cloud_handler_->get_twist_queue().front().twist.linear.x, 1.0); + EXPECT_EQ(combine_cloud_handler_->get_twist_queue().front().twist.angular.z, 0.1); +} + +TEST_F(ConcatenateCloudTest, TestComputeTransformToAdjustForOldTimestamp) +{ + // If time difference between twist msg and pointcloud stamp is more than 100 miliseconds, return + // Identity transformation. case 1: time difference larger than 100 miliseconds + rclcpp::Time pointcloud_stamp1(10, 100'000'000, RCL_ROS_TIME); + rclcpp::Time pointcloud_stamp2(10, 210'000'000, RCL_ROS_TIME); + auto twist_msg1 = std::make_shared(); + twist_msg1->header.stamp = rclcpp::Time(9, 130'000'000, RCL_ROS_TIME); + twist_msg1->twist.twist.linear.x = 1.0; + twist_msg1->twist.twist.angular.z = 0.1; + combine_cloud_handler_->process_twist(twist_msg1); + + auto twist_msg2 = std::make_shared(); + twist_msg2->header.stamp = rclcpp::Time(9, 160'000'000, RCL_ROS_TIME); + twist_msg2->twist.twist.linear.x = 1.0; + twist_msg2->twist.twist.angular.z = 0.1; + combine_cloud_handler_->process_twist(twist_msg2); + + Eigen::Matrix4f transform = combine_cloud_handler_->compute_transform_to_adjust_for_old_timestamp( + pointcloud_stamp1, pointcloud_stamp2); + + // translation + EXPECT_NEAR(transform(0, 3), 0.0, standard_tolerance); + EXPECT_NEAR(transform(1, 3), 0.0, standard_tolerance); + + EXPECT_NEAR(transform(0, 0), 1.0, standard_tolerance); + EXPECT_NEAR(transform(0, 1), 0.0, standard_tolerance); + EXPECT_NEAR(transform(1, 0), 0.0, standard_tolerance); + EXPECT_NEAR(transform(1, 1), 1.0, standard_tolerance); + + std::ostringstream oss; + oss << "Transformation matrix from cloud 2 to cloud 1:\n" << transform; + + if (debug_) { + RCLCPP_INFO(concatenate_node_->get_logger(), "%s", oss.str().c_str()); + } + + // case 2: time difference smaller than 100 miliseconds + rclcpp::Time pointcloud_stamp3(11, 100'000'000, RCL_ROS_TIME); + rclcpp::Time pointcloud_stamp4(11, 150'000'000, RCL_ROS_TIME); + auto twist_msg3 = std::make_shared(); + twist_msg3->header.stamp = rclcpp::Time(11, 130'000'000, RCL_ROS_TIME); + twist_msg3->twist.twist.linear.x = 1.0; + twist_msg3->twist.twist.angular.z = 0.1; + combine_cloud_handler_->process_twist(twist_msg3); + + auto twist_msg4 = std::make_shared(); + twist_msg4->header.stamp = rclcpp::Time(11, 160'000'000, RCL_ROS_TIME); + twist_msg4->twist.twist.linear.x = 1.0; + twist_msg4->twist.twist.angular.z = 0.1; + combine_cloud_handler_->process_twist(twist_msg4); + + transform = combine_cloud_handler_->compute_transform_to_adjust_for_old_timestamp( + pointcloud_stamp3, pointcloud_stamp4); + + // translation + EXPECT_NEAR(transform(0, 3), 0.0499996, standard_tolerance); + EXPECT_NEAR(transform(1, 3), 0.000189999, standard_tolerance); + + // rotation, yaw = 0.005 + EXPECT_NEAR(transform(0, 0), 0.999987, standard_tolerance); + EXPECT_NEAR(transform(0, 1), -0.00499998, standard_tolerance); + EXPECT_NEAR(transform(1, 0), 0.00499998, standard_tolerance); + EXPECT_NEAR(transform(1, 1), 0.999987, standard_tolerance); + + oss.str(""); + oss.clear(); + oss << "Transformation matrix from cloud 4 to cloud 3:\n" << transform; + + if (debug_) { + RCLCPP_INFO(concatenate_node_->get_logger(), "%s", oss.str().c_str()); + } +} + +//////////////////////////////// Test cloud_collector //////////////////////////////// + +TEST_F(ConcatenateCloudTest, TestSetAndGetNaiveCollectorInfo) +{ + auto naive_info = std::make_shared(); + naive_info->timestamp = 15.0; + + 
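+  // Round-trip the collector info: set_info stores the NaiveCollectorInfo and
+  // get_info should return an object of the same type with the same timestamp.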
collector_->set_info(naive_info); + auto collector_info_new = collector_->get_info(); + + auto naive_info_new = + std::dynamic_pointer_cast( + collector_info_new); + ASSERT_NE(naive_info_new, nullptr) << "Collector info is not of type NaiveCollectorInfo"; + + EXPECT_DOUBLE_EQ(naive_info_new->timestamp, 15.0); +} + +TEST_F(ConcatenateCloudTest, TestSetAndGetAdvancedCollectorInfo) +{ + auto advanced_info = std::make_shared(); + advanced_info->timestamp = 10.0; + advanced_info->noise_window = 0.1; + collector_->set_info(advanced_info); + auto collector_info_new = collector_->get_info(); + auto advanced_info_new = + std::dynamic_pointer_cast( + collector_info_new); + ASSERT_NE(advanced_info_new, nullptr) << "Collector info is not of type AdvancedCollectorInfo"; + + // Validate the values + auto min = advanced_info_new->timestamp - advanced_info_new->noise_window; + auto max = advanced_info_new->timestamp + advanced_info_new->noise_window; + EXPECT_DOUBLE_EQ(min, 9.9); + EXPECT_DOUBLE_EQ(max, 10.1); +} + +TEST_F(ConcatenateCloudTest, TestConcatenateClouds) +{ + rclcpp::Time top_timestamp(timestamp_seconds, timestamp_nanoseconds, RCL_ROS_TIME); + rclcpp::Time left_timestamp(timestamp_seconds, timestamp_nanoseconds + 40'000'000, RCL_ROS_TIME); + rclcpp::Time right_timestamp(timestamp_seconds, timestamp_nanoseconds + 80'000'000, RCL_ROS_TIME); + sensor_msgs::msg::PointCloud2 top_pointcloud = + generate_pointcloud_msg(true, false, "lidar_top", top_timestamp); + sensor_msgs::msg::PointCloud2 left_pointcloud = + generate_pointcloud_msg(true, false, "lidar_left", left_timestamp); + sensor_msgs::msg::PointCloud2 right_pointcloud = + generate_pointcloud_msg(true, false, "lidar_right", right_timestamp); + + sensor_msgs::msg::PointCloud2::SharedPtr top_pointcloud_ptr = + std::make_shared(top_pointcloud); + sensor_msgs::msg::PointCloud2::SharedPtr left_pointcloud_ptr = + std::make_shared(left_pointcloud); + sensor_msgs::msg::PointCloud2::SharedPtr right_pointcloud_ptr = + std::make_shared(right_pointcloud); + + std::unordered_map topic_to_cloud_map; + topic_to_cloud_map["lidar_top"] = top_pointcloud_ptr; + topic_to_cloud_map["lidar_left"] = left_pointcloud_ptr; + topic_to_cloud_map["lidar_right"] = right_pointcloud_ptr; + + auto [concatenate_cloud_ptr, topic_to_transformed_cloud_map, topic_to_original_stamp_map] = + collector_->concatenate_pointclouds(topic_to_cloud_map); + + // test output concatenate cloud + // No input twist, so it will not do the motion compensation + std::array expected_pointcloud = { + {Eigen::Vector3f(10.0f, 0.0f, 0.0f), Eigen::Vector3f(0.0f, 10.0f, 0.0f), + Eigen::Vector3f(0.0f, 0.0f, 10.0f), Eigen::Vector3f(10.0f, 0.0f, 0.0f), + Eigen::Vector3f(0.0f, 10.0f, 0.0f), Eigen::Vector3f(0.0f, 0.0f, 10.0f), + Eigen::Vector3f(10.0f, 0.0f, 0.0f), Eigen::Vector3f(0.0f, 10.0f, 0.0f), + Eigen::Vector3f(0.0f, 0.0f, 10.0f)}}; + + size_t i = 0; + std::ostringstream oss; + oss << "Concatenated pointcloud:\n"; + + sensor_msgs::PointCloud2Iterator iter_x(*concatenate_cloud_ptr, "x"); + sensor_msgs::PointCloud2Iterator iter_y(*concatenate_cloud_ptr, "y"); + sensor_msgs::PointCloud2Iterator iter_z(*concatenate_cloud_ptr, "z"); + + for (; iter_x != iter_x.end(); ++iter_x, ++iter_y, ++iter_z, ++i) { + oss << "Concatenated point " << i << ": (" << *iter_x << ", " << *iter_y << ", " << *iter_z + << ")\n"; + EXPECT_FLOAT_EQ(*iter_x, expected_pointcloud[i].x()); + EXPECT_FLOAT_EQ(*iter_y, expected_pointcloud[i].y()); + EXPECT_FLOAT_EQ(*iter_z, expected_pointcloud[i].z()); + } + + if (debug_) { + 
RCLCPP_INFO(concatenate_node_->get_logger(), "%s", oss.str().c_str()); + } + + // test concatenate cloud has the oldest pointcloud's timestamp + EXPECT_FLOAT_EQ( + top_timestamp.seconds(), rclcpp::Time(concatenate_cloud_ptr->header.stamp).seconds()); + + // test separated transformed cloud + std::array expected_top_pointcloud = { + {Eigen::Vector3f(10.0f, 0.0f, 0.0f), Eigen::Vector3f(0.0f, 10.0f, 0.0f), + Eigen::Vector3f(0.0f, 0.0f, 10.0f)}}; + std::array expected_left_pointcloud = { + {Eigen::Vector3f(10.0f, 0.0f, 0.0f), Eigen::Vector3f(0.0f, 10.0f, 0.0f), + Eigen::Vector3f(0.0f, 0.0f, 10.0f)}}; + std::array expected_right_pointcloud = { + {Eigen::Vector3f(10.0f, 0.0f, 0.0f), Eigen::Vector3f(0.0f, 10.0f, 0.0f), + Eigen::Vector3f(0.0f, 0.0f, 10.0f)}}; + + oss.clear(); + oss.str(""); + i = 0; + + sensor_msgs::PointCloud2Iterator top_pc_iter_x( + *(topic_to_transformed_cloud_map.value().at("lidar_top")), "x"); + sensor_msgs::PointCloud2Iterator top_pc_iter_y( + *(topic_to_transformed_cloud_map.value().at("lidar_top")), "y"); + sensor_msgs::PointCloud2Iterator top_pc_iter_z( + *(topic_to_transformed_cloud_map.value().at("lidar_top")), "z"); + + for (; top_pc_iter_x != top_pc_iter_x.end(); + ++top_pc_iter_x, ++top_pc_iter_y, ++top_pc_iter_z, ++i) { + oss << "Top point " << i << ": (" << *top_pc_iter_x << ", " << *top_pc_iter_y << ", " + << *top_pc_iter_z << ")\n"; + EXPECT_FLOAT_EQ(*top_pc_iter_x, expected_top_pointcloud[i].x()); + EXPECT_FLOAT_EQ(*top_pc_iter_y, expected_top_pointcloud[i].y()); + EXPECT_FLOAT_EQ(*top_pc_iter_z, expected_top_pointcloud[i].z()); + } + + if (debug_) { + RCLCPP_INFO(concatenate_node_->get_logger(), "%s", oss.str().c_str()); + } + + oss.clear(); + oss.str(""); + i = 0; + sensor_msgs::PointCloud2Iterator left_pc_iter_x( + *(topic_to_transformed_cloud_map.value().at("lidar_left")), "x"); + sensor_msgs::PointCloud2Iterator left_pc_iter_y( + *(topic_to_transformed_cloud_map.value().at("lidar_left")), "y"); + sensor_msgs::PointCloud2Iterator left_pc_iter_z( + *(topic_to_transformed_cloud_map.value().at("lidar_left")), "z"); + + for (; left_pc_iter_x != left_pc_iter_x.end(); + ++left_pc_iter_x, ++left_pc_iter_y, ++left_pc_iter_z, ++i) { + oss << "Left point " << i << ": (" << *left_pc_iter_x << ", " << *left_pc_iter_y << ", " + << *left_pc_iter_z << ")\n"; + EXPECT_FLOAT_EQ(*left_pc_iter_x, expected_left_pointcloud[i].x()); + EXPECT_FLOAT_EQ(*left_pc_iter_y, expected_left_pointcloud[i].y()); + EXPECT_FLOAT_EQ(*left_pc_iter_z, expected_left_pointcloud[i].z()); + } + + if (debug_) { + RCLCPP_INFO(concatenate_node_->get_logger(), "%s", oss.str().c_str()); + } + + oss.clear(); + oss.str(""); + i = 0; + sensor_msgs::PointCloud2Iterator right_pc_iter_x( + *(topic_to_transformed_cloud_map.value().at("lidar_right")), "x"); + sensor_msgs::PointCloud2Iterator right_pc_iter_y( + *(topic_to_transformed_cloud_map.value().at("lidar_right")), "y"); + sensor_msgs::PointCloud2Iterator right_pc_iter_z( + *(topic_to_transformed_cloud_map.value().at("lidar_right")), "z"); + + for (; right_pc_iter_x != right_pc_iter_x.end(); + ++right_pc_iter_x, ++right_pc_iter_y, ++right_pc_iter_z, ++i) { + oss << "Right point " << i << ": (" << *right_pc_iter_x << ", " << *right_pc_iter_y << ", " + << *right_pc_iter_z << ")\n"; + EXPECT_FLOAT_EQ(*right_pc_iter_x, expected_right_pointcloud[i].x()); + EXPECT_FLOAT_EQ(*right_pc_iter_y, expected_right_pointcloud[i].y()); + EXPECT_FLOAT_EQ(*right_pc_iter_z, expected_right_pointcloud[i].z()); + } + + if (debug_) { + RCLCPP_INFO(concatenate_node_->get_logger(), 
"%s", oss.str().c_str()); + } + + // test original cloud's timestamps + EXPECT_FLOAT_EQ(top_timestamp.seconds(), topic_to_original_stamp_map["lidar_top"]); + EXPECT_FLOAT_EQ(left_timestamp.seconds(), topic_to_original_stamp_map["lidar_left"]); + EXPECT_FLOAT_EQ(right_timestamp.seconds(), topic_to_original_stamp_map["lidar_right"]); +} + +TEST_F(ConcatenateCloudTest, TestProcessSingleCloud) +{ + concatenate_node_->add_cloud_collector(collector_); + + rclcpp::Time timestamp(timestamp_seconds, timestamp_nanoseconds, RCL_ROS_TIME); + sensor_msgs::msg::PointCloud2 top_pointcloud = + generate_pointcloud_msg(true, false, "lidar_top", timestamp); + sensor_msgs::msg::PointCloud2::SharedPtr top_pointcloud_ptr = + std::make_shared(top_pointcloud); + collector_->process_pointcloud("lidar_top", top_pointcloud_ptr); + + auto topic_to_cloud_map = collector_->get_topic_to_cloud_map(); + EXPECT_EQ(topic_to_cloud_map["lidar_top"], top_pointcloud_ptr); + EXPECT_FALSE(collector_->concatenate_finished()); + concatenate_node_->manage_collector_list(); + EXPECT_FALSE(concatenate_node_->get_cloud_collectors().empty()); + + // Sleep for timeout seconds (200 ms) + std::this_thread::sleep_for(std::chrono::milliseconds(200)); + rclcpp::spin_some(concatenate_node_); + + // Collector should concatenate and publish the pointcloud, also delete itself. + EXPECT_TRUE(collector_->concatenate_finished()); + concatenate_node_->manage_collector_list(); + EXPECT_TRUE(concatenate_node_->get_cloud_collectors().empty()); +} + +TEST_F(ConcatenateCloudTest, TestProcessMultipleCloud) +{ + concatenate_node_->add_cloud_collector(collector_); + + rclcpp::Time top_timestamp(timestamp_seconds, timestamp_nanoseconds, RCL_ROS_TIME); + rclcpp::Time left_timestamp(timestamp_seconds, timestamp_nanoseconds + 40'000'000, RCL_ROS_TIME); + rclcpp::Time right_timestamp(timestamp_seconds, timestamp_nanoseconds + 80'000'000, RCL_ROS_TIME); + sensor_msgs::msg::PointCloud2 top_pointcloud = + generate_pointcloud_msg(true, false, "lidar_top", top_timestamp); + sensor_msgs::msg::PointCloud2 left_pointcloud = + generate_pointcloud_msg(true, false, "lidar_left", left_timestamp); + sensor_msgs::msg::PointCloud2 right_pointcloud = + generate_pointcloud_msg(true, false, "lidar_right", right_timestamp); + + sensor_msgs::msg::PointCloud2::SharedPtr top_pointcloud_ptr = + std::make_shared(top_pointcloud); + sensor_msgs::msg::PointCloud2::SharedPtr left_pointcloud_ptr = + std::make_shared(left_pointcloud); + sensor_msgs::msg::PointCloud2::SharedPtr right_pointcloud_ptr = + std::make_shared(right_pointcloud); + + collector_->process_pointcloud("lidar_top", top_pointcloud_ptr); + collector_->process_pointcloud("lidar_left", left_pointcloud_ptr); + collector_->process_pointcloud("lidar_right", right_pointcloud_ptr); + + EXPECT_TRUE(collector_->concatenate_finished()); + concatenate_node_->manage_collector_list(); + EXPECT_TRUE(concatenate_node_->get_cloud_collectors().empty()); +} + +int main(int argc, char ** argv) +{ + ::testing::InitGoogleTest(&argc, argv); + rclcpp::init(argc, argv); + int ret = RUN_ALL_TESTS(); + rclcpp::shutdown(); + return ret; +}