diff --git a/alvr/client_openxr/src/interaction.rs b/alvr/client_openxr/src/interaction.rs
index de4177f03d..6d2901c1d8 100644
--- a/alvr/client_openxr/src/interaction.rs
+++ b/alvr/client_openxr/src/interaction.rs
@@ -480,6 +480,7 @@ pub fn get_head_data(
     stage_reference_space: &xr::Space,
     view_reference_space: &xr::Space,
     time: Duration,
+    future_time: Duration,
     last_view_params: &[ViewParams; 2],
 ) -> Option<(DeviceMotion, Option<[ViewParams; 2]>)> {
     let xr_time = crate::to_xr_time(time);
@@ -523,10 +524,33 @@ pub fn get_head_data(
             .unwrap_or_default(),
     };
 
-    // Angular velocity should be in global reference frame as per spec but Pico and Vive use local
-    // reference frame
+    // Some headsets report velocities in the wrong frame; differentiate poses instead.
     if platform.is_pico() || platform.is_vive() {
-        motion.angular_velocity = motion.pose.orientation * motion.angular_velocity;
+        let xr_future_time = crate::to_xr_time(future_time);
+
+        let predicted_location = view_reference_space
+            .locate(stage_reference_space, xr_future_time)
+            .ok()?;
+
+        if !predicted_location
+            .location_flags
+            .contains(xr::SpaceLocationFlags::ORIENTATION_VALID)
+        {
+            return None;
+        }
+
+        let time_offset_s = future_time
+            .saturating_sub(time)
+            .max(Duration::from_millis(1))
+            .as_secs_f32();
+
+        motion.linear_velocity = (crate::from_xr_vec3(predicted_location.pose.position)
+            - motion.pose.position)
+            / time_offset_s;
+        motion.angular_velocity = (crate::from_xr_quat(predicted_location.pose.orientation)
+            * motion.pose.orientation.inverse())
+        .to_scaled_axis()
+            / time_offset_s;
     }
 
     let last_ipd_m = last_view_params[0]
@@ -553,11 +577,13 @@
     Some((motion, view_params))
 }
 
+#[expect(clippy::too_many_arguments)]
 pub fn get_hand_data(
     xr_session: &xr::Session,
     platform: Platform,
     reference_space: &xr::Space,
     time: Duration,
+    future_time: Duration,
     hand_source: &HandInteraction,
     last_controller_pose: &mut Pose,
     last_palm_pose: &mut Pose,
@@ -570,27 +596,48 @@
         .unwrap_or(false)
     {
         if let Ok((location, velocity)) = hand_source.grip_space.relate(reference_space, xr_time) {
-            if location
+            let orientation_valid = location
                 .location_flags
-                .contains(xr::SpaceLocationFlags::ORIENTATION_VALID)
-            {
+                .contains(xr::SpaceLocationFlags::ORIENTATION_VALID);
+            let position_valid = location
+                .location_flags
+                .contains(xr::SpaceLocationFlags::POSITION_VALID);
+
+            if orientation_valid {
                 last_controller_pose.orientation = crate::from_xr_quat(location.pose.orientation);
             }
 
-            if location
-                .location_flags
-                .contains(xr::SpaceLocationFlags::POSITION_VALID)
-            {
+            if position_valid {
                 last_controller_pose.position = crate::from_xr_vec3(location.pose.position);
             }
 
-            let linear_velocity = crate::from_xr_vec3(velocity.linear_velocity);
+            let mut linear_velocity = crate::from_xr_vec3(velocity.linear_velocity);
             let mut angular_velocity = crate::from_xr_vec3(velocity.angular_velocity);
 
-            // Some headsets use wrong frame of reference
-            if matches!(platform, Platform::PicoNeo3 | Platform::Pico4) || platform.is_vive() {
-                angular_velocity = last_controller_pose.orientation * angular_velocity;
-            };
+            // Some headsets report velocities in the wrong frame; differentiate poses instead.
+            if platform.is_pico() || platform.is_vive() {
+                let xr_future_time = crate::to_xr_time(future_time);
+
+                let maybe_future_location = hand_source
+                    .grip_space
+                    .locate(reference_space, xr_future_time);
+
+                if let Ok(future_location) = maybe_future_location {
+                    if future_location.location_flags.contains(
+                        xr::SpaceLocationFlags::ORIENTATION_VALID
+                            | xr::SpaceLocationFlags::POSITION_VALID,
+                    ) {
+                        let time_offset_s = future_time.saturating_sub(time).as_secs_f32();
+                        linear_velocity = (crate::from_xr_vec3(future_location.pose.position)
+                            - last_controller_pose.position)
+                            / time_offset_s;
+                        angular_velocity = (crate::from_xr_quat(future_location.pose.orientation)
+                            * last_controller_pose.orientation.inverse())
+                        .to_scaled_axis()
+                            / time_offset_s;
+                    }
+                }
+            }
 
             Some(DeviceMotion {
                 pose: *last_controller_pose,
diff --git a/alvr/client_openxr/src/lobby.rs b/alvr/client_openxr/src/lobby.rs
index 71d9f7fb81..0244ceeca3 100644
--- a/alvr/client_openxr/src/lobby.rs
+++ b/alvr/client_openxr/src/lobby.rs
@@ -106,11 +106,15 @@
         self.xr_session
             .sync_actions(&[(&self.interaction_ctx.read().action_set).into()])
            .ok();
+
+        // future_time doesn't have to be any particular value, just something after vsync_time
+        let future_time = vsync_time + Duration::from_millis(80);
         let left_hand_data = interaction::get_hand_data(
            &self.xr_session,
            self.platform,
            &self.reference_space,
            vsync_time,
+            future_time,
            &self.interaction_ctx.read().hands_interaction[0],
            &mut Pose::default(),
            &mut Pose::default(),
@@ -120,6 +124,7 @@
            self.platform,
            &self.reference_space,
            vsync_time,
+            future_time,
            &self.interaction_ctx.read().hands_interaction[1],
            &mut Pose::default(),
            &mut Pose::default(),
diff --git a/alvr/client_openxr/src/stream.rs b/alvr/client_openxr/src/stream.rs
index 119c7dc517..9fab265921 100644
--- a/alvr/client_openxr/src/stream.rs
+++ b/alvr/client_openxr/src/stream.rs
@@ -463,36 +463,20 @@ fn stream_input_loop(
            return;
        };
 
-        // All Pico headsets seem to have a problem with velocity values, to different degrees.
-        // Calculating velocities by differentiation yields jittery results. In the following
-        // workaround, we predict using the runtime, then manually predict back in time in order to
-        // return poses in the "now" time, required by the ClientCore interface. This solution
-        // doesn't fix the issue completely, but most of the predicted time interval will be
-        // correct.
-        let target_time = if platform.is_pico() {
-            now + core_ctx.get_total_prediction_offset()
-        } else {
-            now
-        };
+        let target_time = now + core_ctx.get_total_prediction_offset();
 
         let Some((head_motion, local_views)) = interaction::get_head_data(
            &xr_session,
            platform,
            stage_reference_space,
            view_reference_space,
+            now,
            target_time,
            &last_view_params,
        ) else {
            continue;
        };
 
-        let head_motion = if platform.is_pico() {
-            // Predict back in time, matching the prediction that is done on later on
-            head_motion.predict(target_time, now)
-        } else {
-            head_motion
-        };
-
        if let Some(views) = local_views {
            core_ctx.send_view_params(views);
            last_view_params = views;
@@ -502,34 +486,27 @@
 
        device_motions.push((*HEAD_ID, head_motion));
 
-        let (mut left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
+        let (left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
            &xr_session,
            platform,
            stage_reference_space,
+            now,
            target_time,
            &int_ctx.hands_interaction[0],
            &mut last_controller_poses[0],
            &mut last_palm_poses[0],
        );
-        let (mut right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
+        let (right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
            &xr_session,
            platform,
            stage_reference_space,
+            now,
            target_time,
            &int_ctx.hands_interaction[1],
            &mut last_controller_poses[1],
            &mut last_palm_poses[1],
        );
 
-        if platform.is_pico() {
-            if let Some(left_hand_motion) = &mut left_hand_motion {
-                *left_hand_motion = left_hand_motion.predict(target_time, now);
-            }
-            if let Some(right_hand_motion) = &mut right_hand_motion {
-                *right_hand_motion = right_hand_motion.predict(target_time, now);
-            }
-        }
-
        // Note: When multimodal input is enabled, we are sure that when free hands are used
        // (not holding controllers) the controller data is None.
        if int_ctx.multimodal_hands_enabled || left_hand_skeleton.is_none() {
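On Pico and Vive the patch stops trusting the runtime-reported velocities and derives them by finite differences instead: the runtime is asked for a second pose at future_time, linear velocity is the position delta over that interval, and angular velocity is the relative rotation converted to a scaled axis (axis times angle) over the same interval. The sketch below illustrates that derivation in isolation; it is not part of the patch, `Pose` and `derive_velocities` are illustrative stand-ins, and it assumes the `glam` math crate that the surrounding code appears to use (`Quat`, `Vec3`, `to_scaled_axis`).

```rust
use glam::{Quat, Vec3};
use std::time::Duration;

/// Stand-in for the client's pose type (orientation + position).
struct Pose {
    orientation: Quat,
    position: Vec3,
}

/// Derive linear and angular velocity from two poses sampled `dt` apart.
fn derive_velocities(current: &Pose, future: &Pose, dt: Duration) -> (Vec3, Vec3) {
    // Clamp the interval so a zero-length prediction window can't divide by zero.
    let dt_s = dt.max(Duration::from_millis(1)).as_secs_f32();

    // Linear velocity: position delta over the interval.
    let linear = (future.position - current.position) / dt_s;

    // Angular velocity: relative rotation expressed as a scaled axis (axis * angle),
    // divided by the interval. The result stays in the same frame as the input poses.
    let angular = (future.orientation * current.orientation.inverse()).to_scaled_axis() / dt_s;

    (linear, angular)
}

fn main() {
    let current = Pose {
        orientation: Quat::IDENTITY,
        position: Vec3::ZERO,
    };
    // Pose the runtime predicts 80 ms later: 5 cm forward and ~5.7 degrees of yaw.
    let future = Pose {
        orientation: Quat::from_rotation_y(0.1),
        position: Vec3::new(0.0, 0.0, -0.05),
    };

    let (linear, angular) = derive_velocities(&current, &future, Duration::from_millis(80));
    println!("linear: {linear} m/s, angular: {angular} rad/s");
}
```

Clamping the interval, as the head path does with `.max(Duration::from_millis(1))`, guards against a division by zero when the two sample times coincide.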