fix(client_openxr): 🐛 Better fix for Pico and Vive wrong velocities (#…

zmerp committed Jan 18, 2025
1 parent 33708b3 · commit 93faf9b

Showing 3 changed files with 73 additions and 44 deletions.
77 changes: 62 additions & 15 deletions alvr/client_openxr/src/interaction.rs
@@ -480,6 +480,7 @@ pub fn get_head_data(
     stage_reference_space: &xr::Space,
     view_reference_space: &xr::Space,
     time: Duration,
+    future_time: Duration,
     last_view_params: &[ViewParams; 2],
 ) -> Option<(DeviceMotion, Option<[ViewParams; 2]>)> {
     let xr_time = crate::to_xr_time(time);
@@ -523,10 +524,33 @@ pub fn get_head_data(
             .unwrap_or_default(),
     };
 
-    // Angular velocity should be in global reference frame as per spec but Pico and Vive use local
-    // reference frame
+    // Some headsets use wrong frame of reference for linear and angular velocities.
     if platform.is_pico() || platform.is_vive() {
-        motion.angular_velocity = motion.pose.orientation * motion.angular_velocity;
+        let xr_future_time = crate::to_xr_time(future_time);
+
+        let predicted_location = view_reference_space
+            .locate(stage_reference_space, xr_future_time)
+            .ok()?;
+
+        if !predicted_location
+            .location_flags
+            .contains(xr::SpaceLocationFlags::ORIENTATION_VALID)
+        {
+            return None;
+        }
+
+        let time_offset_s = future_time
+            .saturating_sub(time)
+            .max(Duration::from_millis(1))
+            .as_secs_f32();
+
+        motion.linear_velocity = (crate::from_xr_vec3(predicted_location.pose.position)
+            - motion.pose.position)
+            / time_offset_s;
+        motion.angular_velocity = (crate::from_xr_quat(predicted_location.pose.orientation)
+            * motion.pose.orientation.inverse())
+        .to_scaled_axis()
+            / time_offset_s;
     }
 
     let last_ipd_m = last_view_params[0]
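
The workaround above recovers both velocities by finite differences: it asks the runtime for a second, predicted pose at future_time and divides the pose delta by the time offset (clamped to at least 1 ms so it can never divide by zero). A standalone sketch of the same computation, using glam types (the diff's to_scaled_axis call matches glam's Quat API); all input values are hypothetical:

```rust
use glam::{Quat, Vec3};
use std::time::Duration;

// Recover (linear, angular) velocity from a pose now and a predicted pose
// `time_offset` in the future, mirroring the head-pose branch above.
fn velocities_from_poses(
    position: Vec3,
    orientation: Quat,
    future_position: Vec3,
    future_orientation: Quat,
    time_offset: Duration,
) -> (Vec3, Vec3) {
    // Clamp like the patch does, so a zero offset cannot divide by zero.
    let dt = time_offset.max(Duration::from_millis(1)).as_secs_f32();

    let linear_velocity = (future_position - position) / dt;

    // Delta rotation taking `orientation` to `future_orientation`, expressed
    // as an axis-angle vector (radians) and divided by the time step.
    let angular_velocity = (future_orientation * orientation.inverse()).to_scaled_axis() / dt;

    (linear_velocity, angular_velocity)
}

fn main() {
    let (lin, ang) = velocities_from_poses(
        Vec3::ZERO,
        Quat::IDENTITY,
        Vec3::new(0.008, 0.0, 0.0),  // 8 mm ahead after 80 ms -> 0.1 m/s
        Quat::from_rotation_y(0.08), // 0.08 rad after 80 ms -> 1 rad/s
        Duration::from_millis(80),
    );
    println!("linear: {lin:?}, angular: {ang:?}");
}
```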
@@ -553,11 +577,13 @@ pub fn get_head_data(
     Some((motion, view_params))
 }
 
+#[expect(clippy::too_many_arguments)]
 pub fn get_hand_data(
     xr_session: &xr::Session<xr::OpenGlEs>,
     platform: Platform,
     reference_space: &xr::Space,
     time: Duration,
+    future_time: Duration,
     hand_source: &HandInteraction,
     last_controller_pose: &mut Pose,
     last_palm_pose: &mut Pose,
@@ -570,27 +596,48 @@ pub fn get_hand_data(
         .unwrap_or(false)
     {
         if let Ok((location, velocity)) = hand_source.grip_space.relate(reference_space, xr_time) {
-            if location
+            let orientation_valid = location
                 .location_flags
-                .contains(xr::SpaceLocationFlags::ORIENTATION_VALID)
-            {
+                .contains(xr::SpaceLocationFlags::ORIENTATION_VALID);
+            let position_valid = location
+                .location_flags
+                .contains(xr::SpaceLocationFlags::POSITION_VALID);
+
+            if orientation_valid {
                 last_controller_pose.orientation = crate::from_xr_quat(location.pose.orientation);
             }
 
-            if location
-                .location_flags
-                .contains(xr::SpaceLocationFlags::POSITION_VALID)
-            {
+            if position_valid {
                 last_controller_pose.position = crate::from_xr_vec3(location.pose.position);
             }
 
-            let linear_velocity = crate::from_xr_vec3(velocity.linear_velocity);
+            let mut linear_velocity = crate::from_xr_vec3(velocity.linear_velocity);
             let mut angular_velocity = crate::from_xr_vec3(velocity.angular_velocity);
 
-            // Some headsets use wrong frame of reference
-            if matches!(platform, Platform::PicoNeo3 | Platform::Pico4) || platform.is_vive() {
-                angular_velocity = last_controller_pose.orientation * angular_velocity;
-            };
+            // Some headsets use wrong frame of reference for linear and angular velocities.
+            if platform.is_pico() || platform.is_vive() {
+                let xr_future_time = crate::to_xr_time(future_time);
+
+                let maybe_future_location = hand_source
+                    .grip_space
+                    .locate(reference_space, xr_future_time);
+
+                if let Ok(future_location) = maybe_future_location {
+                    if future_location.location_flags.contains(
+                        xr::SpaceLocationFlags::ORIENTATION_VALID
+                            | xr::SpaceLocationFlags::POSITION_VALID,
+                    ) {
+                        let time_offset_s = future_time.saturating_sub(time).as_secs_f32();
+                        linear_velocity = (crate::from_xr_vec3(future_location.pose.position)
+                            - last_controller_pose.position)
+                            / time_offset_s;
+                        angular_velocity = (crate::from_xr_quat(future_location.pose.orientation)
+                            * last_controller_pose.orientation.inverse())
+                        .to_scaled_axis()
+                            / time_offset_s;
+                    }
+                }
+            }
 
             Some(DeviceMotion {
                 pose: *last_controller_pose,
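
Note the validity test in this branch: contains() with an OR-ed mask is an AND over all of its bits, so the future pose replaces the runtime-reported velocities only when both orientation and position are valid; otherwise the values from relate() are kept as a fallback. A minimal illustration, using the bitflags crate as a hypothetical stand-in for xr::SpaceLocationFlags (whose generated contains() has the same semantics):

```rust
use bitflags::bitflags;

bitflags! {
    // Hypothetical stand-in for xr::SpaceLocationFlags.
    #[derive(Clone, Copy)]
    struct LocationFlags: u64 {
        const ORIENTATION_VALID = 1 << 0;
        const POSITION_VALID = 1 << 1;
    }
}

fn main() {
    let both = LocationFlags::ORIENTATION_VALID | LocationFlags::POSITION_VALID;
    let orientation_only = LocationFlags::ORIENTATION_VALID;

    // The combined mask passes only when every flag in it is set:
    assert!(both.contains(LocationFlags::ORIENTATION_VALID | LocationFlags::POSITION_VALID));
    assert!(!orientation_only
        .contains(LocationFlags::ORIENTATION_VALID | LocationFlags::POSITION_VALID));
    println!("contains() of an OR-ed mask requires both flags");
}
```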
5 changes: 5 additions & 0 deletions alvr/client_openxr/src/lobby.rs
@@ -106,11 +106,15 @@ impl Lobby {
         self.xr_session
             .sync_actions(&[(&self.interaction_ctx.read().action_set).into()])
             .ok();
+
+        // future_time doesn't have to be any particular value, just something after vsync_time
+        let future_time = vsync_time + Duration::from_millis(80);
         let left_hand_data = interaction::get_hand_data(
             &self.xr_session,
             self.platform,
             &self.reference_space,
             vsync_time,
+            future_time,
             &self.interaction_ctx.read().hands_interaction[0],
             &mut Pose::default(),
             &mut Pose::default(),
@@ -120,6 +124,7 @@ impl Lobby {
             self.platform,
             &self.reference_space,
             vsync_time,
+            future_time,
             &self.interaction_ctx.read().hands_interaction[1],
             &mut Pose::default(),
             &mut Pose::default(),
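
The 80 ms here is arbitrary by design: for motion that is roughly constant over the sampling window, the chosen offset cancels out of the finite difference, so any value comfortably after vsync_time recovers the same velocity. A quick hypothetical check:

```rust
use glam::Vec3;
use std::time::Duration;

fn main() {
    // A point moving at a constant 0.1 m/s along x; values are hypothetical.
    let v_true = Vec3::new(0.1, 0.0, 0.0);

    for offset_ms in [40u64, 80, 160] {
        let dt = Duration::from_millis(offset_ms).as_secs_f32();
        // The position a runtime would predict at vsync_time + offset:
        let future_position = v_true * dt;
        // The finite difference recovers the same velocity at every offset.
        let v_recovered = (future_position - Vec3::ZERO) / dt;
        assert!((v_recovered - v_true).length() < 1e-6);
    }
    println!("recovered velocity is offset-independent for constant motion");
}
```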
35 changes: 6 additions & 29 deletions alvr/client_openxr/src/stream.rs
@@ -463,36 +463,20 @@ fn stream_input_loop(
             return;
         };
 
-        // All Pico headsets seem to have a problem with velocity values, to different degrees.
-        // Calculating velocities by differentiation yields jittery results. In the following
-        // workaround, we predict using the runtime, then manually predict back in time in order to
-        // return poses in the "now" time, required by the ClientCore interface. This solution
-        // doesn't fix the issue completely, but most of the predicted time interval will be
-        // correct.
-        let target_time = if platform.is_pico() {
-            now + core_ctx.get_total_prediction_offset()
-        } else {
-            now
-        };
+        let target_time = now + core_ctx.get_total_prediction_offset();
 
         let Some((head_motion, local_views)) = interaction::get_head_data(
             &xr_session,
             platform,
             stage_reference_space,
             view_reference_space,
             now,
+            target_time,
             &last_view_params,
         ) else {
             continue;
         };
 
-        let head_motion = if platform.is_pico() {
-            // Predict back in time, matching the prediction that is done on later on
-            head_motion.predict(target_time, now)
-        } else {
-            head_motion
-        };
-
         if let Some(views) = local_views {
             core_ctx.send_view_params(views);
             last_view_params = views;
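
For context on the deleted branch: it sampled poses at target_time and then used ALVR's motion prediction to shift them back to now, since the ClientCore interface expects poses at the current time. A sketch of what a constant-velocity predict(from, to) helper of that shape typically does; this is an illustration, not ALVR's actual DeviceMotion implementation:

```rust
use glam::{Quat, Vec3};
use std::time::Duration;

struct Motion {
    position: Vec3,
    orientation: Quat,
    linear_velocity: Vec3,
    angular_velocity: Vec3,
}

impl Motion {
    // Extrapolate the pose from time `from` to time `to` at constant velocity.
    fn predict(&self, from: Duration, to: Duration) -> Motion {
        // dt is negative when predicting back in time, as the old workaround did.
        let dt = to.as_secs_f32() - from.as_secs_f32();
        Motion {
            position: self.position + self.linear_velocity * dt,
            // Integrate angular velocity as an axis-angle rotation over dt.
            orientation: Quat::from_scaled_axis(self.angular_velocity * dt) * self.orientation,
            linear_velocity: self.linear_velocity,
            angular_velocity: self.angular_velocity,
        }
    }
}

fn main() {
    let sampled = Motion {
        position: Vec3::new(0.01, 0.0, 0.0), // hypothetical pose at t = 100 ms
        orientation: Quat::IDENTITY,
        linear_velocity: Vec3::new(0.1, 0.0, 0.0),
        angular_velocity: Vec3::ZERO,
    };
    // Shift the sample from t = 100 ms back to t = 0: x goes from 10 mm to 0.
    let now_pose = sampled.predict(Duration::from_millis(100), Duration::ZERO);
    println!("position at now: {:?}", now_pose.position);
}
```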
@@ -502,34 +486,27 @@ fn stream_input_loop(
 
         device_motions.push((*HEAD_ID, head_motion));
 
-        let (mut left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
+        let (left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
             &xr_session,
             platform,
             stage_reference_space,
             now,
+            target_time,
             &int_ctx.hands_interaction[0],
             &mut last_controller_poses[0],
             &mut last_palm_poses[0],
         );
-        let (mut right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
+        let (right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
             &xr_session,
             platform,
             stage_reference_space,
             now,
+            target_time,
             &int_ctx.hands_interaction[1],
             &mut last_controller_poses[1],
             &mut last_palm_poses[1],
         );
 
-        if platform.is_pico() {
-            if let Some(left_hand_motion) = &mut left_hand_motion {
-                *left_hand_motion = left_hand_motion.predict(target_time, now);
-            }
-            if let Some(right_hand_motion) = &mut right_hand_motion {
-                *right_hand_motion = right_hand_motion.predict(target_time, now);
-            }
-        }
-
         // Note: When multimodal input is enabled, we are sure that when free hands are used
         // (not holding controllers) the controller data is None.
         if int_ctx.multimodal_hands_enabled || left_hand_skeleton.is_none() {