Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Expose stereo extrinsics #1242

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions realsense2_camera/include/base_realsense_node.h
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,7 @@ namespace realsense2_camera
cv::Mat& fix_depth_scale(const cv::Mat& from_image, cv::Mat& to_image);
void clip_depth(rs2::depth_frame depth_frame, float clipping_dist);
void updateStreamCalibData(const rs2::video_stream_profile& video_profile);
void updateExtrinsicsCalibData(const rs2::video_stream_profile& left_video_profile, const rs2::video_stream_profile& right_video_profile);
void SetBaseStream();
void publishStaticTransforms();
void publishDynamicTransforms();
Expand Down
64 changes: 64 additions & 0 deletions realsense2_camera/src/base_realsense_node.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1714,6 +1714,9 @@ void BaseRealSenseNode::setupStreams()
{
ROS_INFO("setupStreams...");
try{
std::shared_ptr<rs2::video_stream_profile> left_profile;
std::shared_ptr<rs2::video_stream_profile> right_profile;

// Publish image stream info
for (auto& profiles : _enabled_profiles)
{
Expand All @@ -1723,10 +1726,18 @@ void BaseRealSenseNode::setupStreams()
{
auto video_profile = profile.as<rs2::video_stream_profile>();
updateStreamCalibData(video_profile);

// stream index: 1=left, 2=right
if (video_profile.stream_index() == 1) { left_profile = std::make_shared<rs2::video_stream_profile>(video_profile); }
if (video_profile.stream_index() == 2) { right_profile = std::make_shared<rs2::video_stream_profile>(video_profile); }
}
}
}

if (left_profile && right_profile) {
updateExtrinsicsCalibData(*left_profile, *right_profile);
}

// Streaming IMAGES
std::map<std::string, std::vector<rs2::stream_profile> > profiles;
std::map<std::string, rs2::sensor> active_sensors;
Expand Down Expand Up @@ -1837,6 +1848,59 @@ void BaseRealSenseNode::updateStreamCalibData(const rs2::video_stream_profile& v
}
}

void BaseRealSenseNode::updateExtrinsicsCalibData(const rs2::video_stream_profile& left_video_profile, const rs2::video_stream_profile& right_video_profile)
{
    // Fill in the stereo-rectification fields (R and P) of the RIGHT camera's
    // CameraInfo from the factory extrinsics between the two imagers.
    // The left camera's R stays identity and its P stays as set by
    // updateStreamCalibData(); only the right camera's entries are written here.
    stream_index_pair right{right_video_profile.stream_type(), right_video_profile.stream_index()};

    // Get the relative extrinsics between the left and right camera
    auto LEFT_T_RIGHT = right_video_profile.get_extrinsics_to(left_video_profile);

    // NOTE(review): rs2_extrinsics::rotation is documented by librealsense as
    // column-major, so a RowMajor map reads its transpose (the inverse rotation).
    // For a rectified stereo pair the rotation is ~identity so this is benign in
    // practice, but confirm against the librealsense docs before relying on it.
    auto R = Eigen::Map<Eigen::Matrix<float,3,3,Eigen::RowMajor>>(LEFT_T_RIGHT.rotation);
    auto T = Eigen::Map<Eigen::Matrix<float,3,1>>(LEFT_T_RIGHT.translation);

    // Force the y- and z-axis components of the baseline to 0: a rectified
    // stereo pair is separated along x only.
    // (Open question from the original author: do we also need to force
    // P(0,3) and P(1,3) to be 0?)
    T[1] = 0;
    T[2] = 0;

    // Assemble [R|T] as a 3x4 row-major matrix.
    Eigen::Matrix<float,3,4,Eigen::RowMajor> RT;
    RT << R, T;

    // CameraInfo stores K row-major as double; map it without copying.
    auto K_right = Eigen::Map<Eigen::Matrix<double,3,3,Eigen::RowMajor>>(_camera_info[right].K.data());

    // Projection matrix for the right camera: P = K * [R|T].
    Eigen::Matrix<float,3,4,Eigen::RowMajor> P_right = K_right.cast<float>() * RT;

    // All CameraInfo matrices are stored row-major.
    // 1. Leave the left rotation matrix as identity.
    // 2. Set the right rotation matrix from the relative rotation.
    for (size_t i = 0; i < 9; ++i)
    {
        _camera_info[right].R.at(i) = LEFT_T_RIGHT.rotation[i];
    }

    // 3. Leave the left projection matrix untouched.
    // 4. Set the right projection matrix (row-major, 3x4).
    for (size_t i = 0; i < 12; ++i)
    {
        _camera_info[right].P.at(i) = P_right(i / 4, i % 4);
    }
}

tf::Quaternion BaseRealSenseNode::rotationMatrixToQuaternion(const float rotation[9]) const
{
Eigen::Matrix3f m;
Expand Down