diff --git a/common/perception_utils/CMakeLists.txt b/common/perception_utils/CMakeLists.txt index 338874a41a3f7..146591c865b5d 100644 --- a/common/perception_utils/CMakeLists.txt +++ b/common/perception_utils/CMakeLists.txt @@ -8,6 +8,7 @@ find_package(Boost REQUIRED) ament_auto_add_library(perception_utils SHARED src/predicted_path_utils.cpp + src/traffic_light_utils.cpp src/conversion.cpp ) diff --git a/common/perception_utils/include/perception_utils/prime_synchronizer.hpp b/common/perception_utils/include/perception_utils/prime_synchronizer.hpp new file mode 100644 index 0000000000000..5af44595b8fb2 --- /dev/null +++ b/common/perception_utils/include/perception_utils/prime_synchronizer.hpp @@ -0,0 +1,354 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#ifndef PERCEPTION_UTILS__PRIME_SYNCHRONIZER_HPP_ +#define PERCEPTION_UTILS__PRIME_SYNCHRONIZER_HPP_ + +#include + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace perception_utils +{ + +/** + * @brief This class implemented a multi-topic approximate time synchronizer. + * Different from message_filters::sync::ApproximateTime, this class assumes there's a primary topic + * and more than one secondary topics and the registered callback function will always been called + * for every primary topic message, while for message_filters::sync::ApproximateTime, the callback + * function will be called only when all topics are synchronized. + * + * For every primary topic message M1, when this class receives it, the class would check whether it + * could gather one message of every secondary topic whose timestamp is similar to that of M1. If + * yes, the registered callback function would be called at once with M1 and the gathered messages. + * Otherwise, it would wait for some time. After that, the class would collect secondary messages + * again. For secondary topics that can't find any message with similar timestamp to M1, nullptr + * would be used instead and the registered function would be called. + * + * This class assumes the message types of the primary topic and secondary topics are defined during + * compile-time. It should be noted that the registered callback function is too slow, it would + * block the thread and the primary message couldn't be received. + * + * @tparam PrimeMsgT Primary topic message type + * @tparam SecondaryMsgT Secondary topic message types + */ +template +class PrimeSynchronizer +{ + typedef double StampT; + +public: + /** + * @brief Construct a new Prime Synchronizer object + * + * @param node_ptr node handler pointer + * @param topics topic vector. topics[0] corresponds to primary topic and the others + * correspond to secondary topics + * @param qos qos vector. 
configured for every topic + * @param callback registered callback function, would be called when the messages are + * synchronized + * @param max_delay_t maximum timestamp different (seconds) between primary topic message and + * any other secondary topic message + * @param max_wait_t maximum wait time (seconds) before the messages are synchronized + */ + PrimeSynchronizer( + rclcpp::Node * node_ptr, const std::vector & topics, + const std::vector & qos, + std::function + callback, + StampT max_delay_t = 0.2, StampT max_wait_t = 0.1) + : node_ptr_(node_ptr), callback_(callback), max_delay_t_(max_delay_t), max_wait_t_(max_wait_t) + { + assert((topics.size() == sizeof...(SecondaryMsgT) + 1) && "Incorrect topic number"); + assert(topics.size() == qos.size() && "topic size not equal to qos size!"); + prime_subscriber_ = node_ptr_->create_subscription( + topics[0], qos[0], std::bind(&PrimeSynchronizer::primeCallback, this, std::placeholders::_1)); + initSecondaryListener( + std::vector(topics.begin() + 1, topics.end()), + std::vector(qos.begin() + 1, qos.end())); + std::chrono::nanoseconds wait_duration(static_cast(1e9 * max_wait_t)); + timer_ = rclcpp::create_timer( + node_ptr_, node_ptr_->get_clock(), wait_duration, + std::bind(&PrimeSynchronizer::timerCallback, this)); + timer_->cancel(); + } + +private: + /** + * @brief initialize the secondary topic subscribers. + * Have to use template recursion method to indexing the subscriber tuple + * + * @tparam Idx secondary subscriber tuple index + * @param topics topics vector + * @param qos qos vector + */ + template + void initSecondaryListener( + const std::vector & topics, const std::vector & qos) + { + if constexpr (Idx < sizeof...(SecondaryMsgT)) { + typedef std::tuple_element_t> type; + std::get(sec_subscriber_) = node_ptr_->create_subscription( + topics[Idx], qos[Idx], + std::bind(&PrimeSynchronizer::secondaryCallback, this, std::placeholders::_1)); + initSecondaryListener(topics, qos); + } + } + + /** + * @brief Collect the Idx-th secondary messages with similar timestamp to prime message, + * which is stored in argv[0] and write to argv[Idx + 1] + * + * @tparam Idx secondary subscriber tuple index + * @param argv output tuple + */ + template + void collectSecondaryMsg( + std::tuple & + argv) + { + if constexpr (Idx < sizeof...(SecondaryMsgT)) { + /* + if can't find any message for this topic, write nullptr + */ + if (std::get(sec_messages_).empty()) { + std::get(argv) = nullptr; + } else { + StampT prime_stamp = convertStampFormat(std::get<0>(argv)->header.stamp); + StampT min_delay = std::numeric_limits::max(); + auto best_sec_msg = std::get(sec_messages_).begin()->second; + /* + find the message with closest timestamp to primary message + */ + for (const auto & sec_msg_p : std::get(sec_messages_)) { + StampT delay = std::abs(prime_stamp - sec_msg_p.first); + if (delay < min_delay) { + min_delay = delay; + best_sec_msg = sec_msg_p.second; + } + } + std::get(argv) = min_delay < max_delay_t_ ? 
best_sec_msg : nullptr; + } + collectSecondaryMsg(argv); + } + } + + /** + * @brief check if all messages in argv are valid (not equal to nullptr) + * + * @tparam Idx + * @param argv + * @return true All messages are not nullptr + * @return false At least one message in the tuplc is nullptr + */ + template + bool isArgvValid( + const std::tuple< + typename PrimeMsgT::ConstSharedPtr, typename SecondaryMsgT::ConstSharedPtr...> & argv) + { + if constexpr (Idx < sizeof...(SecondaryMsgT) + 1) { + return (std::get(argv) != nullptr) && isArgvValid(argv); + } + return true; + } + + inline StampT convertStampFormat(const std_msgs::msg::Header::_stamp_type & stamp) + { + return rclcpp::Time(stamp).seconds(); + } + + /** + * @brief callback function when primary message is received + * + * @param msg + */ + void primeCallback(const typename PrimeMsgT::ConstSharedPtr msg) + { + prime_cnt++; + timer_->cancel(); + assert(prime_messages_.size() <= 1); + /* + if there are old prime messages waiting for synchronization with secondary messages, + stop waiting and call the registered callback function with prime message and synchronized + secondary messages. For secondary topics that are not synchronized, use nullptr. + */ + for (auto & p : prime_messages_) { + tryCallback(p.first, true); + } + prime_messages_.clear(); + /* + update the prime messages + */ + StampT stamp = convertStampFormat(msg->header.stamp); + prime_messages_.insert(std::make_pair(stamp, msg)); + /* + check if secondary messages are all ready to synchronize with prime message. + If yes, process it immediately + */ + if (tryCallback(stamp, false)) { + prime_messages_.clear(); + } else { + timer_->reset(); + } + // RCLCPP_INFO_STREAM(node_ptr_->get_logger(), "primary message count = " << prime_cnt); + } + + /** + * @brief callback function when secondary message is received. + * + * @tparam M Type of the secondary message + * @tparam Idx Idx of the type + * @param msg + */ + template + void secondaryCallback(const typename M::ConstSharedPtr msg) + { + /* + insert the new secondary message + */ + StampT stamp = convertStampFormat(msg->header.stamp); + auto & msg_map = std::get(sec_messages_); + msg_map.insert(std::make_pair(stamp, msg)); + assert(prime_messages_.size() <= 1); + /* + check if any prime message can gather all secondary messages. + */ + for (auto it = prime_messages_.begin(); it != prime_messages_.end();) { + /* + Try to synchronize. If succeeds, remove the handled primary message + */ + if (tryCallback(it->first, false)) { + timer_->cancel(); + it = prime_messages_.erase(it); + } else { + it++; + } + } + + /* + this should not happen. Just in case. + */ + if (prime_messages_.empty() && msg_map.empty()) { + RCLCPP_ERROR_STREAM(node_ptr_->get_logger(), "Empty prime_messages and secondary_messages"); + return; + } + /* + remove old secondary messages. + */ + StampT stamp_thres = + prime_messages_.empty() ? msg_map.rbegin()->first : prime_messages_.begin()->first; + for (auto it = msg_map.begin(); it != msg_map.end();) { + if (stamp_thres - it->first > max_delay_t_) { + it = msg_map.erase(it); + } else { + it++; + } + } + } + + /** + * @brief Timer callback. The maximum wait time exceeds. Handle all stored primary messages. + * + */ + void timerCallback() + { + timer_->cancel(); + assert(prime_messages_.size() <= 1); + for (auto & p : prime_messages_) { + tryCallback(p.first, true); + } + prime_messages_.clear(); + } + + /** + * @brief Try to synchronize. 
If ignoreInvalidSecMsg is set to true, + * the registered function would be called with the primary message with given timestamp and + * collected secondary messages even if not all secondary messages are collected. Otherwise, the + * registered function would not be called except all secondary messages are collected. + * + * @param stamp + * @param ignoreInvalidSecMsg + * @return true + * @return false + */ + bool tryCallback(StampT stamp, bool ignoreInvalidSecMsg = true) + { + if (prime_messages_.count(stamp) == 0) { + return true; + } + std::tuple argv; + std::get<0>(argv) = prime_messages_[stamp]; + collectSecondaryMsg(argv); + if (ignoreInvalidSecMsg || isArgvValid(argv)) { + std::apply(callback_, argv); + return true; + } + return false; + } + /** + * @brief node pointer + * + */ + rclcpp::Node * node_ptr_; + /** + * @brief The registered callback function that would be called when the prime message and sub + * messages are synchronized or timeout + * + */ + std::function + callback_; + /** + * @brief the prime message subscriber + * + */ + typename rclcpp::Subscription::SharedPtr prime_subscriber_; + /** + * @brief the secondary message subscriber tuple + * + */ + std::tuple::SharedPtr...> sec_subscriber_; + /** + * @brief map to store the prime messages using timestamp of the messages as key. + * + */ + std::map prime_messages_; + rclcpp::TimerBase::SharedPtr timer_; + /** + * @brief tuple of maps to store the secondary messages using timestamp of the messages as key + * + */ + std::tuple...> sec_messages_; + /* + maximum wait time (seconds) before the secondary messages are collected + */ + double max_wait_t_; + /* + maximum delay time (seconds) between a secondary message and the primary message + */ + double max_delay_t_; + int prime_cnt = 0; +}; + +} // namespace perception_utils + +#endif // PERCEPTION_UTILS__PRIME_SYNCHRONIZER_HPP_ diff --git a/common/perception_utils/include/perception_utils/traffic_light_utils.hpp b/common/perception_utils/include/perception_utils/traffic_light_utils.hpp new file mode 100644 index 0000000000000..be21536953ce7 --- /dev/null +++ b/common/perception_utils/include/perception_utils/traffic_light_utils.hpp @@ -0,0 +1,52 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
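As a usage reference for the PrimeSynchronizer template defined above, here is a minimal sketch (not part of this PR) of a node that treats CameraInfo as the primary topic and PointCloud2 as the single secondary topic. The node name, topic names, and QoS settings are illustrative assumptions only; the constructor arguments follow the signature shown above.

#include "perception_utils/prime_synchronizer.hpp"

#include <rclcpp/rclcpp.hpp>

#include <sensor_msgs/msg/camera_info.hpp>
#include <sensor_msgs/msg/point_cloud2.hpp>

#include <functional>
#include <memory>
#include <string>
#include <vector>

class ExampleSyncNode : public rclcpp::Node
{
public:
  ExampleSyncNode() : rclcpp::Node("example_sync_node")
  {
    // topics[0] is the primary topic; every following entry is a secondary topic.
    const std::vector<std::string> topics{"~/input/camera_info", "~/input/cloud"};
    const std::vector<rclcpp::QoS> qos(topics.size(), rclcpp::SensorDataQoS());
    synchronizer_ = std::make_shared<SyncType>(
      this, topics, qos,
      std::bind(&ExampleSyncNode::onSync, this, std::placeholders::_1, std::placeholders::_2),
      /*max_delay_t=*/0.2, /*max_wait_t=*/0.1);
  }

private:
  using SyncType = perception_utils::PrimeSynchronizer<
    sensor_msgs::msg::CameraInfo, sensor_msgs::msg::PointCloud2>;

  // Called once per primary (CameraInfo) message; the secondary pointer is nullptr when no
  // PointCloud2 with a close enough timestamp arrived within max_wait_t.
  void onSync(
    const sensor_msgs::msg::CameraInfo::ConstSharedPtr camera_info,
    const sensor_msgs::msg::PointCloud2::ConstSharedPtr cloud)
  {
    if (!camera_info || !cloud) {
      RCLCPP_WARN(get_logger(), "No synchronized point cloud for this camera info");
      return;
    }
    RCLCPP_INFO(get_logger(), "synchronized a %u x %u point cloud", cloud->width, cloud->height);
  }

  std::shared_ptr<SyncType> synchronizer_;
};

This mirrors how the occlusion predictor nodelet later in this diff wires its four synchronized topics, just reduced to one secondary topic.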
+ +#ifndef PERCEPTION_UTILS__TRAFFIC_LIGHT_UTILS_HPP_ +#define PERCEPTION_UTILS__TRAFFIC_LIGHT_UTILS_HPP_ + +#include "tier4_perception_msgs/msg/traffic_light_element.hpp" +#include "tier4_perception_msgs/msg/traffic_light_roi.hpp" +#include "tier4_perception_msgs/msg/traffic_signal.hpp" + +#include +#include +#include +#include + +namespace perception_utils +{ + +namespace traffic_light +{ + +bool isRoiValid( + const tier4_perception_msgs::msg::TrafficLightRoi & roi, uint32_t width, uint32_t height); + +void setRoiInvalid(tier4_perception_msgs::msg::TrafficLightRoi & roi); + +bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficSignal & signal); + +void setSignalUnknown(tier4_perception_msgs::msg::TrafficSignal & signal, float confidence = -1); + +tf2::Vector3 getTrafficLightTopLeft(const lanelet::ConstLineString3d & traffic_light); + +tf2::Vector3 getTrafficLightBottomRight(const lanelet::ConstLineString3d & traffic_light); + +tf2::Vector3 getTrafficLightCenter(const lanelet::ConstLineString3d & traffic_light); + +} // namespace traffic_light + +} // namespace perception_utils + +#endif // PERCEPTION_UTILS__TRAFFIC_LIGHT_UTILS_HPP_ diff --git a/common/perception_utils/package.xml b/common/perception_utils/package.xml index bc1d1b8b6bece..2fa9ccfbb430c 100644 --- a/common/perception_utils/package.xml +++ b/common/perception_utils/package.xml @@ -16,9 +16,11 @@ autoware_auto_perception_msgs geometry_msgs interpolation + lanelet2_extension libboost-dev rclcpp tier4_autoware_utils + tier4_perception_msgs ament_cmake_ros ament_lint_auto diff --git a/common/perception_utils/src/traffic_light_utils.cpp b/common/perception_utils/src/traffic_light_utils.cpp new file mode 100644 index 0000000000000..5ff3b3d422945 --- /dev/null +++ b/common/perception_utils/src/traffic_light_utils.cpp @@ -0,0 +1,79 @@ +// Copyright 2023 TIER IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
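A short sketch of how the ROI helpers declared in traffic_light_utils.hpp above might be used to drop detections that do not fit inside the camera image. The function name and the message include path are illustrative assumptions; the message field layout matches the usage elsewhere in this diff.

#include "perception_utils/traffic_light_utils.hpp"

#include <tier4_perception_msgs/msg/traffic_light_roi_array.hpp>

#include <cstdint>

// Drop detections that have zero size or that do not lie completely inside the image.
void discardInvalidRois(
  tier4_perception_msgs::msg::TrafficLightRoiArray & rois, const uint32_t image_width,
  const uint32_t image_height)
{
  for (auto & roi : rois.rois) {
    if (!perception_utils::traffic_light::isRoiValid(roi, image_width, image_height)) {
      // setRoiInvalid marks the roi by zeroing its width and height.
      perception_utils::traffic_light::setRoiInvalid(roi);
    }
  }
}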
+ +#include "perception_utils/traffic_light_utils.hpp" + +namespace perception_utils +{ + +namespace traffic_light +{ + +bool isRoiValid( + const tier4_perception_msgs::msg::TrafficLightRoi & roi, uint32_t width, uint32_t height) +{ + uint32_t x1 = roi.roi.x_offset; + uint32_t x2 = roi.roi.x_offset + roi.roi.width; + uint32_t y1 = roi.roi.y_offset; + uint32_t y2 = roi.roi.y_offset + roi.roi.height; + return roi.roi.width > 0 && roi.roi.height > 0 && x1 < width && y1 < height && x2 < width && + y2 < height; +} + +void setRoiInvalid(tier4_perception_msgs::msg::TrafficLightRoi & roi) +{ + roi.roi.height = roi.roi.width = 0; +} + +bool isSignalUnknown(const tier4_perception_msgs::msg::TrafficSignal & signal) +{ + return signal.elements.size() == 1 && + signal.elements[0].shape == tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN && + signal.elements[0].color == tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; +} + +void setSignalUnknown(tier4_perception_msgs::msg::TrafficSignal & signal, float confidence) +{ + signal.elements.resize(1); + signal.elements[0].shape = tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; + signal.elements[0].color = tier4_perception_msgs::msg::TrafficLightElement::UNKNOWN; + // the default value is -1, which means to not set confidence + if (confidence >= 0) { + signal.elements[0].confidence = confidence; + } +} + +tf2::Vector3 getTrafficLightTopLeft(const lanelet::ConstLineString3d & traffic_light) +{ + const auto & tl_bl = traffic_light.front(); + const double tl_height = traffic_light.attributeOr("height", 0.0); + return tf2::Vector3(tl_bl.x(), tl_bl.y(), tl_bl.z() + tl_height); +} + +tf2::Vector3 getTrafficLightBottomRight(const lanelet::ConstLineString3d & traffic_light) +{ + const auto & tl_bl = traffic_light.back(); + return tf2::Vector3(tl_bl.x(), tl_bl.y(), tl_bl.z()); +} + +tf2::Vector3 getTrafficLightCenter(const lanelet::ConstLineString3d & traffic_light) +{ + tf2::Vector3 top_left = getTrafficLightTopLeft(traffic_light); + tf2::Vector3 bottom_right = getTrafficLightBottomRight(traffic_light); + return (top_left + bottom_right) / 2; +} + +} // namespace traffic_light + +} // namespace perception_utils diff --git a/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/pcl_conversion.hpp b/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/pcl_conversion.hpp new file mode 100644 index 0000000000000..c66a63eb1ac51 --- /dev/null +++ b/common/tier4_autoware_utils/include/tier4_autoware_utils/ros/pcl_conversion.hpp @@ -0,0 +1,72 @@ +// Copyright 2023 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
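To make the geometry convention of the helpers implemented above concrete, here is a minimal, self-contained sketch with hand-picked ids and coordinates (all hypothetical): the lanelet linestring runs along the bottom edge of the traffic light, so front() is the bottom-left corner, back() is the bottom-right corner, and the "height" attribute gives the vertical extent added to the top-left z.

#include "perception_utils/traffic_light_utils.hpp"

#include <lanelet2_core/primitives/LineString.h>
#include <lanelet2_core/primitives/Point.h>

#include <iostream>

int main()
{
  lanelet::Point3d bottom_left(1, 10.0, 5.0, 2.0);
  lanelet::Point3d bottom_right(2, 11.0, 5.0, 2.0);
  lanelet::LineString3d traffic_light(3, {bottom_left, bottom_right});
  traffic_light.attributes()["height"] = "0.5";

  const tf2::Vector3 center =
    perception_utils::traffic_light::getTrafficLightCenter(traffic_light);
  // top-left = (10, 5, 2.5), bottom-right = (11, 5, 2), so the center is (10.5, 5, 2.25)
  std::cout << center.x() << " " << center.y() << " " << center.z() << std::endl;
  return 0;
}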
+ +#ifndef TIER4_AUTOWARE_UTILS__ROS__PCL_CONVERSION_HPP_ +#define TIER4_AUTOWARE_UTILS__ROS__PCL_CONVERSION_HPP_ + +#include +#include + +#include + +namespace tier4_autoware_utils +{ +/** + * @brief a faster implementation of converting sensor_msgs::msg::PointCloud2 to + * pcl::PointCloud and transform the cloud + * + * @tparam Scalar + * @param cloud input PointCloud2 message + * @param pcl_cloud output transformed pcl cloud + * @param transform eigen transformation matrix + */ +template +void transformPointCloudFromROSMsg( + const sensor_msgs::msg::PointCloud2 & cloud, pcl::PointCloud & pcl_cloud, + const Eigen::Matrix & transform) +{ + // Copy info fields + pcl_conversions::toPCL(cloud.header, pcl_cloud.header); + pcl_cloud.width = cloud.width; + pcl_cloud.height = cloud.height; + pcl_cloud.is_dense = cloud.is_dense == 1; + + pcl::MsgFieldMap field_map; + std::vector msg_fields; + pcl_conversions::toPCL(cloud.fields, msg_fields); + pcl::createMapping(msg_fields, field_map); + + // transform point data + std::uint32_t num_points = cloud.width * cloud.height; + pcl_cloud.points.resize(num_points); + std::uint8_t * cloud_data = reinterpret_cast(&pcl_cloud.points[0]); + pcl::detail::Transformer tf(transform); + + for (std::uint32_t row = 0; row < cloud.height; ++row) { + const std::uint8_t * row_data = &cloud.data[row * cloud.row_step]; + for (std::uint32_t col = 0; col < cloud.width; ++col) { + const std::uint8_t * msg_data = row_data + col * cloud.point_step; + for (const pcl::detail::FieldMapping & mapping : field_map) { + const float * msg_ptr = reinterpret_cast(msg_data); + float * pcl_ptr = reinterpret_cast(cloud_data + mapping.struct_offset); + tf.se3(msg_ptr, pcl_ptr); + } + cloud_data += sizeof(pcl::PointXYZ); + } + } +} + +} // namespace tier4_autoware_utils + +#endif // TIER4_AUTOWARE_UTILS__ROS__PCL_CONVERSION_HPP_ diff --git a/common/tier4_autoware_utils/package.xml b/common/tier4_autoware_utils/package.xml index ff3fa65f034b3..fd9b315f8e4d5 100644 --- a/common/tier4_autoware_utils/package.xml +++ b/common/tier4_autoware_utils/package.xml @@ -19,6 +19,7 @@ diagnostic_msgs geometry_msgs libboost-dev + pcl_conversions pcl_ros rclcpp tf2 diff --git a/perception/traffic_light_occlusion_predictor/CMakeLists.txt b/perception/traffic_light_occlusion_predictor/CMakeLists.txt new file mode 100644 index 0000000000000..481561ed92be8 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/CMakeLists.txt @@ -0,0 +1,32 @@ +cmake_minimum_required(VERSION 3.14) +project(traffic_light_occlusion_predictor) + +find_package(autoware_cmake REQUIRED) +autoware_package() + +find_package(Eigen3 REQUIRED) +find_package(PCL 1.8 REQUIRED) + +include_directories( + SYSTEM + ${EIGEN3_INCLUDE_DIR} + ${PCL_INCLUDE_DIRS} +) + +ament_auto_add_library(traffic_light_occlusion_predictor SHARED + src/nodelet.cpp + src/occlusion_predictor.cpp +) + +rclcpp_components_register_node(traffic_light_occlusion_predictor + PLUGIN "traffic_light::TrafficLightOcclusionPredictorNodelet" + EXECUTABLE traffic_light_occlusion_predictor_node +) + +link_directories(${PCL_LIBRARY_DIRS}) +target_link_libraries(traffic_light_occlusion_predictor ${PCL_LIBRARIES}) + +ament_auto_package(INSTALL_TO_SHARE + config + launch +) diff --git a/perception/traffic_light_occlusion_predictor/README.md b/perception/traffic_light_occlusion_predictor/README.md new file mode 100644 index 0000000000000..312f9abccc464 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/README.md @@ -0,0 +1,36 @@ +# The 
`traffic_light_occlusion_predictor` Package
+
+## Overview
+
+`traffic_light_occlusion_predictor` receives the detected traffic light ROIs and calculates the occlusion ratio of each ROI with the LiDAR point cloud.
+
+For each traffic light ROI, hundreds of pixels are sampled and projected into 3D space. Then, from the camera's point of view, the number of projected pixels occluded by the point cloud is counted and used to calculate the occlusion ratio of the ROI. As shown in the following image, the red pixels are occluded, and the occlusion ratio is the number of red pixels divided by the total number of pixels.
+
+![image](images/occlusion.png)
+
+If no point cloud is received, or if every point cloud has a very large timestamp difference from the camera image, the occlusion ratio of each ROI is set to 0.
+
+## Input topics
+
+| Name                  | Type                                        | Description              |
+| --------------------- | ------------------------------------------- | ------------------------ |
+| `~/input/vector_map`  | autoware_auto_mapping_msgs::HADMapBin       | vector map               |
+| `~/input/rois`        | tier4_perception_msgs::TrafficLightRoiArray | traffic light detections |
+| `~/input/camera_info` | sensor_msgs::CameraInfo                     | target camera parameter  |
+| `~/input/cloud`       | sensor_msgs::PointCloud2                    | LiDAR point cloud        |
+
+## Output topics
+
+| Name                       | Type                                      | Description                                                   |
+| -------------------------- | ----------------------------------------- | ------------------------------------------------------------- |
+| `~/output/traffic_signals` | tier4_perception_msgs::TrafficSignalArray | traffic signals, with heavily occluded signals set to unknown |
+
+## Node parameters
+
+| Parameter                            | Type   | Description                                                                   |
+| ------------------------------------ | ------ | ----------------------------------------------------------------------------- |
+| `azimuth_occlusion_resolution_deg`   | double | azimuth resolution of the LiDAR point cloud (degrees)                          |
+| `elevation_occlusion_resolution_deg` | double | elevation resolution of the LiDAR point cloud (degrees)                        |
+| `max_valid_pt_dist`                  | double | points within this distance (meters) are used for the calculation              |
+| `max_image_cloud_delay`              | double | maximum accepted delay (seconds) between the LiDAR point cloud and the image   |
+| `max_wait_t`                         | double | maximum time (seconds) to wait for the LiDAR point cloud                       |
diff --git a/perception/traffic_light_occlusion_predictor/config/traffic_light_occlusion_predictor.param.yaml b/perception/traffic_light_occlusion_predictor/config/traffic_light_occlusion_predictor.param.yaml
new file mode 100644
index 0000000000000..d0230e2b12c42
--- /dev/null
+++ b/perception/traffic_light_occlusion_predictor/config/traffic_light_occlusion_predictor.param.yaml
@@ -0,0 +1,11 @@
+/**:
+  ros__parameters:
+    azimuth_occlusion_resolution_deg: 0.15 # degree
+    elevation_occlusion_resolution_deg: 0.08 # degree
+    max_valid_pt_dist: 50.0
+    # The node waits at most max_wait_t seconds for a point cloud whose timestamp difference from the image is less than max_image_cloud_delay.
+ # If no cloud with timestamp difference smaller than max_image_cloud_delay is found, the node would publish occlusion ratio of all + # traffic lights as 0 + max_image_cloud_delay: 0.5 + max_wait_t: 0.05 + max_occlusion_ratio: 50 diff --git a/perception/traffic_light_occlusion_predictor/images/occlusion.png b/perception/traffic_light_occlusion_predictor/images/occlusion.png new file mode 100644 index 0000000000000..94e2dc4469f35 Binary files /dev/null and b/perception/traffic_light_occlusion_predictor/images/occlusion.png differ diff --git a/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/nodelet.hpp b/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/nodelet.hpp new file mode 100644 index 0000000000000..2117b650efc41 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/nodelet.hpp @@ -0,0 +1,102 @@ +// Copyright 2023 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__NODELET_HPP_ +#define TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__NODELET_HPP_ + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace traffic_light +{ +class TrafficLightOcclusionPredictorNodelet : public rclcpp::Node +{ +public: + explicit TrafficLightOcclusionPredictorNodelet(const rclcpp::NodeOptions & node_options); + +private: + struct Config + { + double azimuth_occlusion_resolution_deg; + double elevation_occlusion_resolution_deg; + double max_valid_pt_dist; + double max_image_cloud_delay; + double max_wait_t; + int max_occlusion_ratio; + }; + +private: + /** + * @brief receive the lanelet2 map + * + * @param input_msg + */ + void mapCallback(const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr input_msg); + /** + * @brief subscribers + * + */ + void syncCallback( + const tier4_perception_msgs::msg::TrafficSignalArray::ConstSharedPtr in_signal_msg, + const tier4_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr in_roi_msg, + const sensor_msgs::msg::CameraInfo::ConstSharedPtr in_cam_info_msg, + const sensor_msgs::msg::PointCloud2::ConstSharedPtr in_cloud_msg); + + rclcpp::Subscription::SharedPtr map_sub_; + /** + * @brief publishers + * + */ + rclcpp::Publisher::SharedPtr signal_pub_; + + tf2_ros::Buffer tf_buffer_; + tf2_ros::TransformListener tf_listener_; + std::map traffic_light_position_map_; + Config config_; + /** + * @brief main class for calculating the occlusion probability + * + */ + std::shared_ptr cloud_occlusion_predictor_; + + typedef perception_utils::PrimeSynchronizer< + tier4_perception_msgs::msg::TrafficSignalArray, + tier4_perception_msgs::msg::TrafficLightRoiArray, sensor_msgs::msg::CameraInfo, + sensor_msgs::msg::PointCloud2> + SynchronizerType; + + std::shared_ptr synchronizer_; +}; +} // namespace traffic_light +#endif // 
TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__NODELET_HPP_ diff --git a/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/occlusion_predictor.hpp b/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/occlusion_predictor.hpp new file mode 100644 index 0000000000000..9c14cd992fac8 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/include/traffic_light_occlusion_predictor/occlusion_predictor.hpp @@ -0,0 +1,95 @@ +// Copyright 2023-2026 the Autoware Foundation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#ifndef TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__OCCLUSION_PREDICTOR_HPP_ +#define TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__OCCLUSION_PREDICTOR_HPP_ + +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace traffic_light +{ + +struct Ray +{ + float azimuth; + float elevation; + float dist; +}; + +class CloudOcclusionPredictor +{ +public: + CloudOcclusionPredictor( + rclcpp::Node * node_ptr, float max_valid_pt_distance, float azimuth_occlusion_resolution_deg, + float elevation_occlusion_resolution_deg); + + void predict( + const sensor_msgs::msg::CameraInfo::ConstSharedPtr & camera_info_msg, + const tier4_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr & rois_msg, + const sensor_msgs::msg::PointCloud2::ConstSharedPtr & cloud_msg, + const tf2_ros::Buffer & tf_buffer, + const std::map & traffic_light_position_map, + std::vector & occlusion_ratios); + +private: + uint32_t predict(const pcl::PointXYZ & roi_top_left, const pcl::PointXYZ & roi_bottom_right); + + void filterCloud( + const pcl::PointCloud & cloud_in, const std::vector & roi_tls, + const std::vector & roi_brs, pcl::PointCloud & cloud_out); + + void sampleTrafficLightRoi( + const pcl::PointXYZ & top_left, const pcl::PointXYZ & bottom_right, + uint32_t horizontal_sample_num, uint32_t vertical_sample_num, + pcl::PointCloud & cloud_out); + + void calcRoiVectex3D( + const tier4_perception_msgs::msg::TrafficLightRoi & roi, + const image_geometry::PinholeCameraModel & pinhole_model, + const std::map & traffic_light_position_map, + const tf2::Transform & tf_camera2map, pcl::PointXYZ & top_left, pcl::PointXYZ & bottom_right); + + std::map > > lidar_rays_; + rclcpp::Node * node_ptr_; + float max_valid_pt_distance_; + float azimuth_occlusion_resolution_deg_; + float elevation_occlusion_resolution_deg_; +}; + +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_OCCLUSION_PREDICTOR__OCCLUSION_PREDICTOR_HPP_ diff --git a/perception/traffic_light_occlusion_predictor/launch/traffic_light_occlusion_predictor.launch.xml b/perception/traffic_light_occlusion_predictor/launch/traffic_light_occlusion_predictor.launch.xml new file mode 100644 index 0000000000000..795267b920e24 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/launch/traffic_light_occlusion_predictor.launch.xml @@ -0,0 
+1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/perception/traffic_light_occlusion_predictor/package.xml b/perception/traffic_light_occlusion_predictor/package.xml new file mode 100644 index 0000000000000..813f2914b7c4b --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/package.xml @@ -0,0 +1,36 @@ + + + + traffic_light_occlusion_predictor + 0.1.0 + The traffic_light_occlusion_predictor package + Mingyu Li + Apache License 2.0 + + ament_cmake_auto + + autoware_cmake + + autoware_auto_geometry + autoware_auto_mapping_msgs + geometry_msgs + image_geometry + lanelet2_extension + pcl_msgs + perception_utils + rclcpp + rclcpp_components + sensor_msgs + tf2 + tf2_eigen + tf2_ros + tier4_autoware_utils + tier4_perception_msgs + + ament_lint_auto + autoware_lint_common + + + ament_cmake + + diff --git a/perception/traffic_light_occlusion_predictor/src/nodelet.cpp b/perception/traffic_light_occlusion_predictor/src/nodelet.cpp new file mode 100644 index 0000000000000..90d96b4c9cbfc --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/src/nodelet.cpp @@ -0,0 +1,138 @@ +// Copyright 2023 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "traffic_light_occlusion_predictor/nodelet.hpp" + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include +#include + +namespace traffic_light +{ + +TrafficLightOcclusionPredictorNodelet::TrafficLightOcclusionPredictorNodelet( + const rclcpp::NodeOptions & node_options) +: Node("traffic_light_occlusion_predictor_node", node_options), + tf_buffer_(this->get_clock()), + tf_listener_(tf_buffer_) +{ + using std::placeholders::_1; + using std::placeholders::_2; + using std::placeholders::_3; + using std::placeholders::_4; + + // subscribers + map_sub_ = create_subscription( + "~/input/vector_map", rclcpp::QoS{1}.transient_local(), + std::bind(&TrafficLightOcclusionPredictorNodelet::mapCallback, this, _1)); + + // publishers + signal_pub_ = + create_publisher("~/output/traffic_signals", 1); + // configuration parameters + config_.azimuth_occlusion_resolution_deg = + declare_parameter("azimuth_occlusion_resolution_deg", 0.15); + config_.elevation_occlusion_resolution_deg = + declare_parameter("elevation_occlusion_resolution_deg", 0.08); + config_.max_valid_pt_dist = declare_parameter("max_valid_pt_dist", 50.0); + config_.max_image_cloud_delay = declare_parameter("max_image_cloud_delay", 1.0); + config_.max_wait_t = declare_parameter("max_wait_t", 0.02); + config_.max_occlusion_ratio = declare_parameter("max_occlusion_ratio", 50); + + cloud_occlusion_predictor_ = std::make_shared( + this, config_.max_valid_pt_dist, config_.azimuth_occlusion_resolution_deg, + config_.elevation_occlusion_resolution_deg); + + const std::vector topics{ + "~/input/traffic_signals", "~/input/rois", "~/input/camera_info", "~/input/cloud"}; + const std::vector qos(topics.size(), 
rclcpp::SensorDataQoS()); + synchronizer_ = std::make_shared( + this, topics, qos, + std::bind(&TrafficLightOcclusionPredictorNodelet::syncCallback, this, _1, _2, _3, _4), + config_.max_image_cloud_delay, config_.max_wait_t); +} + +void TrafficLightOcclusionPredictorNodelet::mapCallback( + const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr input_msg) +{ + traffic_light_position_map_.clear(); + auto lanelet_map_ptr = std::make_shared(); + + lanelet::utils::conversion::fromBinMsg(*input_msg, lanelet_map_ptr); + lanelet::ConstLanelets all_lanelets = lanelet::utils::query::laneletLayer(lanelet_map_ptr); + std::vector all_lanelet_traffic_lights = + lanelet::utils::query::autowareTrafficLights(all_lanelets); + for (auto tl_itr = all_lanelet_traffic_lights.begin(); tl_itr != all_lanelet_traffic_lights.end(); + ++tl_itr) { + lanelet::AutowareTrafficLightConstPtr tl = *tl_itr; + + auto lights = tl->trafficLights(); + for (auto lsp : lights) { + if (!lsp.isLineString()) { // traffic lights must be linestrings + continue; + } + lanelet::ConstLineString3d string3d = static_cast(lsp); + traffic_light_position_map_[lsp.id()] = + perception_utils::traffic_light::getTrafficLightCenter(string3d); + } + } +} + +void TrafficLightOcclusionPredictorNodelet::syncCallback( + const tier4_perception_msgs::msg::TrafficSignalArray::ConstSharedPtr in_signal_msg, + const tier4_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr in_roi_msg, + const sensor_msgs::msg::CameraInfo::ConstSharedPtr in_cam_info_msg, + const sensor_msgs::msg::PointCloud2::ConstSharedPtr in_cloud_msg) +{ + tier4_perception_msgs::msg::TrafficSignalArray out_msg = *in_signal_msg; + std::vector occlusion_ratios; + if ( + in_cloud_msg == nullptr || in_cam_info_msg == nullptr || in_roi_msg == nullptr || + in_roi_msg->rois.size() != in_signal_msg->signals.size()) { + occlusion_ratios.resize(in_roi_msg->rois.size(), 0); + } else { + cloud_occlusion_predictor_->predict( + in_cam_info_msg, in_roi_msg, in_cloud_msg, tf_buffer_, traffic_light_position_map_, + occlusion_ratios); + } + for (size_t i = 0; i < occlusion_ratios.size(); i++) { + if (occlusion_ratios[i] >= config_.max_occlusion_ratio) { + perception_utils::traffic_light::setSignalUnknown(out_msg.signals[i]); + } + } + signal_pub_->publish(out_msg); +} + +} // namespace traffic_light + +#include +RCLCPP_COMPONENTS_REGISTER_NODE(traffic_light::TrafficLightOcclusionPredictorNodelet) diff --git a/perception/traffic_light_occlusion_predictor/src/occlusion_predictor.cpp b/perception/traffic_light_occlusion_predictor/src/occlusion_predictor.cpp new file mode 100644 index 0000000000000..eff8921a897a3 --- /dev/null +++ b/perception/traffic_light_occlusion_predictor/src/occlusion_predictor.cpp @@ -0,0 +1,250 @@ +// Copyright 2023-2026 the Autoware Foundation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
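For context, the following is a brief sketch of how the transformPointCloudFromROSMsg helper introduced in tier4_autoware_utils above can be fed with a TF-derived transform, similar to what CloudOcclusionPredictor::predict does below. The function name, frame handling, and the 0.2 s lookup timeout are illustrative; header paths (e.g. tf2_eigen) may differ slightly between ROS 2 distributions.

#include <tier4_autoware_utils/ros/pcl_conversion.hpp>

#include <rclcpp/rclcpp.hpp>
#include <tf2_eigen/tf2_eigen.hpp>
#include <tf2_ros/buffer.h>

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <sensor_msgs/msg/point_cloud2.hpp>

#include <string>

// Convert a PointCloud2 to pcl::PointCloud<pcl::PointXYZ> while transforming it into
// target_frame, without first copying it into an untransformed pcl cloud.
bool toTransformedPclCloud(
  const sensor_msgs::msg::PointCloud2 & cloud_msg, const std::string & target_frame,
  const tf2_ros::Buffer & tf_buffer, pcl::PointCloud<pcl::PointXYZ> & cloud_out)
{
  try {
    const Eigen::Matrix4d transform =
      tf2::transformToEigen(
        tf_buffer.lookupTransform(
          target_frame, cloud_msg.header.frame_id, rclcpp::Time(cloud_msg.header.stamp),
          rclcpp::Duration::from_seconds(0.2)))
        .matrix();
    tier4_autoware_utils::transformPointCloudFromROSMsg(cloud_msg, cloud_out, transform);
    return true;
  } catch (const tf2::TransformException & ex) {
    return false;
  }
}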
+// + +#include "traffic_light_occlusion_predictor/occlusion_predictor.hpp" + +namespace +{ + +traffic_light::Ray point2ray(const pcl::PointXYZ & pt) +{ + traffic_light::Ray ray; + ray.dist = std::sqrt(pt.x * pt.x + pt.y * pt.y + pt.z * pt.z); + ray.elevation = RAD2DEG(std::atan2(pt.y, std::hypot(pt.x, pt.z))); + ray.azimuth = RAD2DEG(std::atan2(pt.x, pt.z)); + return ray; +} + +} // namespace + +namespace traffic_light +{ + +CloudOcclusionPredictor::CloudOcclusionPredictor( + rclcpp::Node * node_ptr, float max_valid_pt_distance, float azimuth_occlusion_resolution_deg, + float elevation_occlusion_resolution_deg) +: node_ptr_(node_ptr), + max_valid_pt_distance_(max_valid_pt_distance), + azimuth_occlusion_resolution_deg_(azimuth_occlusion_resolution_deg), + elevation_occlusion_resolution_deg_(elevation_occlusion_resolution_deg) +{ +} + +void CloudOcclusionPredictor::predict( + const sensor_msgs::msg::CameraInfo::ConstSharedPtr & camera_info_msg, + const tier4_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr & rois_msg, + const sensor_msgs::msg::PointCloud2::ConstSharedPtr & cloud_msg, + const tf2_ros::Buffer & tf_buffer, + const std::map & traffic_light_position_map, + std::vector & occlusion_ratios) +{ + if (camera_info_msg == nullptr || rois_msg == nullptr || cloud_msg == nullptr) { + return; + } + occlusion_ratios.resize(rois_msg->rois.size()); + // get transformation from cloud to camera + Eigen::Matrix4d camera2cloud; + try { + camera2cloud = + tf2::transformToEigen(tf_buffer.lookupTransform( + camera_info_msg->header.frame_id, cloud_msg->header.frame_id, + rclcpp::Time(camera_info_msg->header.stamp), + rclcpp::Duration::from_seconds(0.2))) + .matrix(); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM( + node_ptr_->get_logger(), "Error: cannot get transform from << " + << camera_info_msg->header.frame_id << " to " + << cloud_msg->header.frame_id); + return; + } + // get transformation from map to camera + tf2::Transform tf_camera2map; + try { + geometry_msgs::msg::TransformStamped tf_stamped = tf_buffer.lookupTransform( + camera_info_msg->header.frame_id, "map", rclcpp::Time(camera_info_msg->header.stamp), + rclcpp::Duration::from_seconds(0.2)); + tf2::fromMsg(tf_stamped.transform, tf_camera2map); + } catch (tf2::TransformException & ex) { + RCLCPP_ERROR_STREAM( + node_ptr_->get_logger(), + "Error: cannot get transform from << " << camera_info_msg->header.frame_id << " to map"); + return; + } + + std::vector roi_tls(rois_msg->rois.size()), roi_brs(rois_msg->rois.size()); + image_geometry::PinholeCameraModel pinhole_model; + pinhole_model.fromCameraInfo(*camera_info_msg); + for (size_t i = 0; i < rois_msg->rois.size(); i++) { + calcRoiVectex3D( + rois_msg->rois[i], pinhole_model, traffic_light_position_map, tf_camera2map, roi_tls[i], + roi_brs[i]); + } + + lidar_rays_.clear(); + // points in camera frame + pcl::PointCloud cloud_camera; + // points within roi + pcl::PointCloud cloud_roi; + tier4_autoware_utils::transformPointCloudFromROSMsg(*cloud_msg, cloud_camera, camera2cloud); + + filterCloud(cloud_camera, roi_tls, roi_brs, cloud_roi); + + for (const pcl::PointXYZ & pt : cloud_roi) { + Ray ray = ::point2ray(pt); + lidar_rays_[static_cast(ray.azimuth)][static_cast(ray.elevation)].push_back(ray); + } + for (size_t i = 0; i < roi_tls.size(); i++) { + occlusion_ratios[i] = predict(roi_tls[i], roi_brs[i]); + } +} + +void CloudOcclusionPredictor::calcRoiVectex3D( + const tier4_perception_msgs::msg::TrafficLightRoi & roi, + const image_geometry::PinholeCameraModel & 
pinhole_model, + const std::map & traffic_light_position_map, + const tf2::Transform & tf_camera2map, pcl::PointXYZ & top_left, pcl::PointXYZ & bottom_right) +{ + if (traffic_light_position_map.count(roi.traffic_light_id) == 0) { + return; + } + double dist2cam = (tf_camera2map * traffic_light_position_map.at(roi.traffic_light_id)).length(); + { + cv::Point2d pixel(roi.roi.x_offset, roi.roi.y_offset); + cv::Point2d rectified_pixel = pinhole_model.rectifyPoint(pixel); + cv::Point3d ray = pinhole_model.projectPixelTo3dRay(rectified_pixel); + double ray_len = std::sqrt(ray.ddot(ray)); + top_left.x = dist2cam * ray.x / ray_len; + top_left.y = dist2cam * ray.y / ray_len; + top_left.z = dist2cam * ray.z / ray_len; + } + { + cv::Point2d pixel(roi.roi.x_offset + roi.roi.width, roi.roi.y_offset + roi.roi.height); + cv::Point2d rectified_pixel = pinhole_model.rectifyPoint(pixel); + cv::Point3d ray = pinhole_model.projectPixelTo3dRay(rectified_pixel); + double ray_len = std::sqrt(ray.ddot(ray)); + bottom_right.x = dist2cam * ray.x / ray_len; + bottom_right.y = dist2cam * ray.y / ray_len; + bottom_right.z = dist2cam * ray.z / ray_len; + } +} + +void CloudOcclusionPredictor::filterCloud( + const pcl::PointCloud & cloud_in, const std::vector & roi_tls, + const std::vector & roi_brs, pcl::PointCloud & cloud_out) +{ + float min_x = 0, max_x = 0, min_y = 0, max_y = 0, min_z = 0, max_z = 0; + for (const auto & pt : roi_tls) { + min_x = std::min(min_x, pt.x); + max_x = std::max(max_x, pt.x); + min_y = std::min(min_y, pt.y); + max_y = std::max(max_y, pt.y); + min_z = std::min(min_z, pt.z); + max_z = std::max(max_z, pt.z); + } + for (const auto & pt : roi_brs) { + min_x = std::min(min_x, pt.x); + max_x = std::max(max_x, pt.x); + min_y = std::min(min_y, pt.y); + max_y = std::max(max_y, pt.y); + min_z = std::min(min_z, pt.z); + max_z = std::max(max_z, pt.z); + } + const float min_dist_to_cam = 1.0f; + cloud_out.clear(); + for (const auto & pt : cloud_in) { + if ( + pt.x < min_x || pt.x > max_x || pt.y < min_y || pt.y > max_y || pt.z < min_z || + pt.z > max_z) { + continue; + } + float dist = pt.getVector3fMap().squaredNorm(); + if ( + dist <= min_dist_to_cam * min_dist_to_cam || + dist >= max_valid_pt_distance_ * max_valid_pt_distance_) { + continue; + } + cloud_out.push_back(pt); + } +} + +void CloudOcclusionPredictor::sampleTrafficLightRoi( + const pcl::PointXYZ & top_left, const pcl::PointXYZ & bottom_right, + uint32_t horizontal_sample_num, uint32_t vertical_sample_num, + pcl::PointCloud & cloud_out) +{ + cloud_out.clear(); + float x1 = top_left.x; + float y1 = top_left.y; + float z1 = top_left.z; + float x2 = bottom_right.x; + float y2 = bottom_right.y; + float z2 = bottom_right.z; + for (uint32_t i1 = 0; i1 < horizontal_sample_num; i1++) { + for (uint32_t i2 = 0; i2 < vertical_sample_num; i2++) { + float x = x1 + (x2 - x1) * i1 / (horizontal_sample_num - 1); + float y = y1 + (y2 - y1) * i2 / (vertical_sample_num - 1); + float z = z1 + (z2 - z1) * i1 / (horizontal_sample_num - 1); + cloud_out.push_back(pcl::PointXYZ(x, y, z)); + } + } +} + +uint32_t CloudOcclusionPredictor::predict( + const pcl::PointXYZ & roi_top_left, const pcl::PointXYZ & roi_bottom_right) +{ + const uint32_t horizontal_sample_num = 20; + const uint32_t vertical_sample_num = 20; + static_assert(horizontal_sample_num > 1 && vertical_sample_num > 1); + const float min_dist_from_occlusion_to_tl = 5.0f; + + pcl::PointCloud tl_sample_cloud; + sampleTrafficLightRoi( + roi_top_left, roi_bottom_right, horizontal_sample_num, vertical_sample_num, 
tl_sample_cloud); + uint32_t occluded_num = 0; + for (const pcl::PointXYZ & tl_pt : tl_sample_cloud) { + Ray tl_ray = ::point2ray(tl_pt); + bool occluded = false; + // the azimuth and elevation range to search for points that may occlude tl_pt + int min_azimuth = static_cast(tl_ray.azimuth - azimuth_occlusion_resolution_deg_); + int max_azimuth = static_cast(tl_ray.azimuth + azimuth_occlusion_resolution_deg_); + int min_elevation = static_cast(tl_ray.elevation - elevation_occlusion_resolution_deg_); + int max_elevation = static_cast(tl_ray.elevation + elevation_occlusion_resolution_deg_); + /** + * search among lidar rays whose azimuth and elevation angle are close to the tl_ray. + * for a lidar ray r1 whose azimuth and elevation are very close to tl_pt, + * and the distance from r1 to camera is smaller than the distance from tl_pt to camera, + * then tl_pt is occluded by r1. + */ + for (int azimuth = min_azimuth; (azimuth <= max_azimuth) && !occluded; azimuth++) { + for (int elevation = min_elevation; (elevation <= max_elevation) && !occluded; elevation++) { + for (const Ray & lidar_ray : lidar_rays_[azimuth][elevation]) { + if ( + std::abs(lidar_ray.azimuth - tl_ray.azimuth) <= azimuth_occlusion_resolution_deg_ && + std::abs(lidar_ray.elevation - tl_ray.elevation) <= + elevation_occlusion_resolution_deg_ && + lidar_ray.dist < tl_ray.dist - min_dist_from_occlusion_to_tl) { + occluded = true; + break; + } + } + } + } + occluded_num += occluded; + } + return 100 * occluded_num / tl_sample_cloud.size(); +} + +} // namespace traffic_light
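
To make the ray geometry above easier to follow, here is a minimal standalone sketch of the azimuth/elevation binning and the occlusion criterion used by CloudOcclusionPredictor::predict: a sampled traffic-light point counts as occluded when a LiDAR return lies in (nearly) the same direction but is at least 5 m closer to the camera. This is a simplified illustration with made-up coordinates; the real predict() additionally scans the neighboring integer-degree bins rather than only the bin containing the sampled point.

#include <cmath>
#include <cstdio>
#include <map>
#include <vector>

// Same convention as traffic_light::Ray, expressed in the camera optical frame:
// z forward, x right, y down, so a point on the optical axis has azimuth = elevation = 0.
struct Ray
{
  float azimuth;    // deg, atan2(x, z)
  float elevation;  // deg, atan2(y, hypot(x, z))
  float dist;       // m, distance from the camera origin
};

Ray point2ray(float x, float y, float z)
{
  constexpr float rad2deg = 180.0F / 3.14159265F;  // RAD2DEG in the original comes from tf2
  return Ray{
    rad2deg * std::atan2(x, z), rad2deg * std::atan2(y, std::hypot(x, z)),
    std::sqrt(x * x + y * y + z * z)};
}

int main()
{
  // A LiDAR return 10 m in front of the camera and a sampled traffic-light point 30 m away,
  // both lying along (almost) the same viewing direction.
  const Ray lidar = point2ray(0.5F, 0.0F, 10.0F);
  const Ray light = point2ray(1.5F, 0.0F, 30.0F);

  // Bin the LiDAR rays by integer degree, as lidar_rays_ does.
  std::map<int, std::map<int, std::vector<Ray>>> bins;
  bins[static_cast<int>(lidar.azimuth)][static_cast<int>(lidar.elevation)].push_back(lidar);

  const float azimuth_res_deg = 0.15F;    // azimuth_occlusion_resolution_deg
  const float elevation_res_deg = 0.08F;  // elevation_occlusion_resolution_deg
  const float margin = 5.0F;              // min_dist_from_occlusion_to_tl

  bool occluded = false;
  for (const Ray & r : bins[static_cast<int>(light.azimuth)][static_cast<int>(light.elevation)]) {
    occluded |= std::abs(r.azimuth - light.azimuth) <= azimuth_res_deg &&
                std::abs(r.elevation - light.elevation) <= elevation_res_deg &&
                r.dist < light.dist - margin;
  }
  std::printf("occluded: %d\n", occluded);  // prints "occluded: 1"
  return 0;
}

The occlusion ratio reported by the node is then simply the fraction of the 20 x 20 sampled ROI points for which this test is true, expressed as a percentage.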