Client refactoring (#1634)

* Use more let-else expressions
* Use channels between the rendering and tracking threads
Riccardo Zaglia 2023-05-23 10:56:54 +08:00 committed by GitHub
parent d48ddffcf3
commit 06c3204b9e
2 changed files with 168 additions and 144 deletions
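
The two changes named in the commit message follow a common shape. Below is a minimal, self-contained sketch of both patterns; it is not taken from the diff itself — the Pose struct, poll_pose, and track_once are hypothetical stand-ins — while let-else and std::sync::mpsc are the standard-library features the refactor relies on.

use std::{sync::mpsc, thread, time::Duration};

// Hypothetical pose type standing in for the real tracking data.
#[derive(Debug, Clone, Copy)]
struct Pose {
    position: [f32; 3],
}

// Hypothetical poll that can fail; returns None when tracking is lost.
fn poll_pose(tick: u32) -> Option<Pose> {
    (tick % 4 != 0).then(|| Pose {
        position: [0.0, 1.6, tick as f32 * 0.01],
    })
}

// let-else: bail out early instead of nesting the happy path in if-let
// or propagating an error with `?`.
fn track_once(tick: u32, sender: &mpsc::Sender<Pose>) {
    let Some(pose) = poll_pose(tick) else {
        // Tracking lost for this tick; skip it instead of returning an error.
        return;
    };
    sender.send(pose).ok();
}

fn main() {
    // Channel from the tracking thread (producer) to the rendering loop
    // (consumer), in place of a shared, locked queue.
    let (sender, receiver) = mpsc::channel();

    let tracking = thread::spawn(move || {
        for tick in 0..20 {
            track_once(tick, &sender);
            thread::sleep(Duration::from_millis(5));
        }
        // Dropping `sender` closes the channel and ends the consumer loop.
    });

    // "Rendering" side: consume poses as they arrive.
    let mut history = Vec::new();
    while let Ok(pose) = receiver.recv() {
        history.push(pose);
    }

    tracking.join().unwrap();
    println!("received {} poses, last: {:?}", history.len(), history.last());
}

In the diff below, the same channel idea replaces the shared Arc<Mutex<VecDeque<HistoryView>>>: the tracking thread sends HistoryView values through an mpsc::Sender and the render loop drains them with try_recv, while a second channel forwards the recreated reference space back to the tracking thread.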

View File

@@ -461,16 +461,18 @@ pub fn get_hand_motion(
time: xr::Time,
hand_source: &HandSource,
last_position: &mut Vec3,
) -> StrResult<(Option<DeviceMotion>, Option<[Pose; 26]>)> {
) -> (Option<DeviceMotion>, Option<[Pose; 26]>) {
if hand_source
.grip_action
.is_active(session, xr::Path::NULL)
.map_err(err!())?
.unwrap_or(false)
{
let (location, velocity) = hand_source
let Ok((location, velocity)) = hand_source
.grip_space
.relate(reference_space, time)
.map_err(err!())?;
else {
return (None, None);
};
if location
.location_flags
@@ -488,35 +490,34 @@ pub fn get_hand_motion(
angular_velocity: to_vec3(velocity.angular_velocity),
};
return Ok((Some(hand_motion), None));
return (Some(hand_motion), None);
}
if let Some(tracker) = &hand_source.skeleton_tracker {
// todo: support also velocities in the protocol
if let Some((joint_locations, jont_velocities)) = reference_space
let Some(tracker) = &hand_source.skeleton_tracker else {
return (None, None);
};
let Some((joint_locations, jont_velocities)) = reference_space
.relate_hand_joints(tracker, time)
.map_err(err!())?
{
let root_motion = DeviceMotion {
pose: to_pose(joint_locations[0].pose),
linear_velocity: to_vec3(jont_velocities[0].linear_velocity),
angular_velocity: to_vec3(jont_velocities[0].angular_velocity),
};
.ok().flatten()
else {
return (None, None);
};
let joints = joint_locations
.iter()
.map(|j| to_pose(j.pose))
.collect::<Vec<_>>()
.try_into()
.unwrap();
let root_motion = DeviceMotion {
pose: to_pose(joint_locations[0].pose),
linear_velocity: to_vec3(jont_velocities[0].linear_velocity),
angular_velocity: to_vec3(jont_velocities[0].angular_velocity),
};
Ok((Some(root_motion), Some(joints)))
} else {
Ok((None, None))
}
} else {
Ok((None, None))
}
let joints = joint_locations
.iter()
.map(|j| to_pose(j.pose))
.collect::<Vec<_>>()
.try_into()
.unwrap();
(Some(root_motion), Some(joints))
}
// todo: move emulation to server
@@ -588,12 +589,14 @@ pub fn update_buttons(
platform: Platform,
xr_session: &xr::Session<xr::AnyGraphics>,
button_actions: &HashMap<u64, ButtonAction>,
) -> StrResult<Vec<ButtonEntry>> {
) -> Vec<ButtonEntry> {
let mut button_entries = Vec::with_capacity(2);
for (id, action) in button_actions {
match action {
ButtonAction::Binary(action) => {
let state = action.state(xr_session, xr::Path::NULL).map_err(err!())?;
let Ok(state) = action.state(xr_session, xr::Path::NULL) else {
continue;
};
if state.changed_since_last_sync {
button_entries.push(ButtonEntry {
@@ -609,7 +612,9 @@ pub fn update_buttons(
}
}
ButtonAction::Scalar(action) => {
let state = action.state(xr_session, xr::Path::NULL).map_err(err!())?;
let Ok(state) = action.state(xr_session, xr::Path::NULL) else {
continue;
};
if state.changed_since_last_sync {
button_entries.push(ButtonEntry {
@@ -627,7 +632,7 @@ pub fn update_buttons(
}
}
Ok(button_entries)
button_entries
}
pub struct FaceInputContext {
@@ -691,15 +696,15 @@ pub fn get_eye_gazes(
reference_space: &xr::Space,
time: xr::Time,
) -> [Option<Pose>; 2] {
if let Some(tracker) = &context.eye_tracker_fb {
if let Ok(gazes) = tracker.get_eye_gazes(reference_space, time) {
[
gazes.gaze[0].as_ref().map(|g| to_pose(g.pose)),
gazes.gaze[1].as_ref().map(|g| to_pose(g.pose)),
]
} else {
[None, None]
}
let Some(tracker) = &context.eye_tracker_fb else {
return [None, None]
};
if let Ok(gazes) = tracker.get_eye_gazes(reference_space, time) {
[
gazes.gaze[0].as_ref().map(|g| to_pose(g.pose)),
gazes.gaze[1].as_ref().map(|g| to_pose(g.pose)),
]
} else {
[None, None]
}

View File

@@ -3,7 +3,6 @@ mod interaction;
use alvr_client_core::{opengl::RenderViewInput, ClientCoreEvent};
use alvr_common::{
glam::{Quat, UVec2, Vec2, Vec3},
parking_lot::{Mutex, RwLock},
prelude::*,
settings_schema::Switch,
DeviceMotion, Fov, Pose, RelaxedAtomic, HEAD_ID, LEFT_HAND_ID, RIGHT_HAND_ID,
@@ -16,7 +15,7 @@ use std::{
collections::VecDeque,
path::Path,
ptr,
sync::Arc,
sync::{mpsc, Arc},
thread,
time::{Duration, Instant},
};
@@ -41,18 +40,12 @@ struct HistoryView {
struct StreamingInputContext {
platform: Platform,
is_streaming: Arc<RelaxedAtomic>,
frame_interval: Duration,
xr_instance: xr::Instance,
xr_session: xr::Session<xr::AnyGraphics>,
hands_context: Arc<HandsInteractionContext>,
face_context: Option<FaceInputContext>,
reference_space: Arc<RwLock<xr::Space>>,
views_history: Arc<Mutex<VecDeque<HistoryView>>>,
}
#[derive(Default)]
struct StreamingInputState {
history_view_sender: mpsc::Sender<HistoryView>,
reference_space: Arc<xr::Space>,
last_ipd: f32,
last_hand_positions: [Vec3; 2],
}
@@ -210,87 +203,94 @@ pub fn create_swapchain(
// This function is allowed to return errors. It can happen when the session is destroyed
// asynchronously
fn update_streaming_input(
ctx: &StreamingInputContext,
state: &mut StreamingInputState,
) -> StrResult {
fn update_streaming_input(ctx: &mut StreamingInputContext) {
// Streaming related inputs are updated here. Make sure every input poll is done in this
// thread
ctx.xr_session
if let Err(e) = ctx
.xr_session
.sync_actions(&[(&ctx.hands_context.action_set).into()])
.map_err(err!())?;
{
error!("{e}");
return;
}
let now = xr_runtime_now(&ctx.xr_instance, ctx.platform).ok_or_else(enone!())?;
let Some(now) = xr_runtime_now(&ctx.xr_instance) else {
error!("Cannot poll tracking: invalid time");
return;
};
let target_timestamp = now + alvr_client_core::get_head_prediction_offset();
let (view_flags, views) = ctx
.xr_session
.locate_views(
xr::ViewConfigurationType::PRIMARY_STEREO,
to_xr_time(target_timestamp),
&ctx.reference_space.read(),
)
.map_err(err!())?;
let mut device_motions = Vec::with_capacity(3);
if !view_flags.contains(xr::ViewStateFlags::POSITION_VALID)
|| !view_flags.contains(xr::ViewStateFlags::ORIENTATION_VALID)
{
return Ok(());
}
'head_tracking: {
let Ok((view_flags, views)) = ctx
.xr_session
.locate_views(
xr::ViewConfigurationType::PRIMARY_STEREO,
to_xr_time(target_timestamp),
&ctx.reference_space,
)
else {
error!("Cannot locate views");
break 'head_tracking;
};
let ipd = (to_vec3(views[0].pose.position) - to_vec3(views[1].pose.position)).length();
if f32::abs(state.last_ipd - ipd) > IPD_CHANGE_EPS {
alvr_client_core::send_views_config([to_fov(views[0].fov), to_fov(views[1].fov)], ipd);
state.last_ipd = ipd;
}
// Note: Here is assumed that views are on the same plane and orientation. The head position
// is approximated as the center point between the eyes.
let head_position = (to_vec3(views[0].pose.position) + to_vec3(views[1].pose.position)) / 2.0;
let head_orientation = to_quat(views[0].pose.orientation);
{
let mut views_history_lock = ctx.views_history.lock();
views_history_lock.push_back(HistoryView {
timestamp: target_timestamp,
views,
});
if views_history_lock.len() > 360 {
views_history_lock.pop_front();
if !view_flags.contains(xr::ViewStateFlags::POSITION_VALID)
|| !view_flags.contains(xr::ViewStateFlags::ORIENTATION_VALID)
{
break 'head_tracking;
}
let ipd = (to_vec3(views[0].pose.position) - to_vec3(views[1].pose.position)).length();
if f32::abs(ctx.last_ipd - ipd) > IPD_CHANGE_EPS {
alvr_client_core::send_views_config([to_fov(views[0].fov), to_fov(views[1].fov)], ipd);
ctx.last_ipd = ipd;
}
// Note: Here is assumed that views are on the same plane and orientation. The head position
// is approximated as the center point between the eyes.
let head_position =
(to_vec3(views[0].pose.position) + to_vec3(views[1].pose.position)) / 2.0;
let head_orientation = to_quat(views[0].pose.orientation);
ctx.history_view_sender
.send(HistoryView {
timestamp: target_timestamp,
views,
})
.ok();
device_motions.push((
*HEAD_ID,
DeviceMotion {
pose: Pose {
orientation: head_orientation,
position: head_position,
},
linear_velocity: Vec3::ZERO,
angular_velocity: Vec3::ZERO,
},
));
}
let tracker_time = to_xr_time(now + alvr_client_core::get_tracker_prediction_offset());
let mut device_motions = vec![(
*HEAD_ID,
DeviceMotion {
pose: Pose {
orientation: head_orientation,
position: head_position,
},
linear_velocity: Vec3::ZERO,
angular_velocity: Vec3::ZERO,
},
)];
let (left_hand_motion, left_hand_skeleton) = interaction::get_hand_motion(
&ctx.xr_session,
&ctx.reference_space.read(),
&ctx.reference_space,
tracker_time,
&ctx.hands_context.hand_sources[0],
&mut state.last_hand_positions[0],
)?;
&mut ctx.last_hand_positions[0],
);
let (right_hand_motion, right_hand_skeleton) = interaction::get_hand_motion(
&ctx.xr_session,
&ctx.reference_space.read(),
&ctx.reference_space,
tracker_time,
&ctx.hands_context.hand_sources[1],
&mut state.last_hand_positions[1],
)?;
&mut ctx.last_hand_positions[1],
);
if let Some(motion) = left_hand_motion {
device_motions.push((*LEFT_HAND_ID, motion));
@@ -301,11 +301,7 @@ fn update_streaming_input(
let face_data = if let Some(context) = &ctx.face_context {
FaceData {
eye_gazes: interaction::get_eye_gazes(
context,
&ctx.reference_space.read(),
to_xr_time(now),
),
eye_gazes: interaction::get_eye_gazes(context, &ctx.reference_space, to_xr_time(now)),
fb_face_expression: interaction::get_fb_face_expression(context, to_xr_time(now)),
htc_eye_expression: interaction::get_htc_eye_expression(context),
htc_lip_expression: interaction::get_htc_lip_expression(context),
@@ -325,12 +321,10 @@ fn update_streaming_input(
ctx.platform,
&ctx.xr_session,
&ctx.hands_context.button_actions,
)?;
);
if !button_entries.is_empty() {
alvr_client_core::send_buttons(button_entries);
}
Ok(())
}
pub fn entry_point() {
@@ -438,19 +432,21 @@ pub fn entry_point() {
&xr_session.clone().into_any_graphics(),
));
let reference_space = Arc::new(RwLock::new(
let is_streaming = Arc::new(RelaxedAtomic::new(false));
let mut reference_space = Arc::new(
xr_session
.create_reference_space(xr::ReferenceSpaceType::STAGE, xr::Posef::IDENTITY)
.unwrap(),
));
let is_streaming = Arc::new(RelaxedAtomic::new(false));
);
let mut lobby_swapchains = None;
let mut stream_swapchains = None;
let mut stream_view_resolution = UVec2::ZERO;
let mut streaming_input_thread = None::<thread::JoinHandle<_>>;
let views_history = Arc::new(Mutex::new(VecDeque::new()));
let mut views_history = VecDeque::new();
let (history_view_sender, history_view_receiver) = mpsc::channel();
let mut reference_space_sender = None::<mpsc::Sender<_>>;
let default_view = xr::View {
pose: xr::Posef {
@@ -547,12 +543,18 @@ pub fn entry_point() {
event.reference_space_type()
);
*reference_space.write() = xr_session
.create_reference_space(
xr::ReferenceSpaceType::STAGE,
xr::Posef::IDENTITY,
)
.unwrap();
reference_space = Arc::new(
xr_session
.create_reference_space(
xr::ReferenceSpaceType::STAGE,
xr::Posef::IDENTITY,
)
.unwrap(),
);
if let Some(sender) = &reference_space_sender {
sender.send(Arc::clone(&reference_space)).ok();
}
alvr_client_core::send_playspace(
xr_session
@@ -649,26 +651,35 @@ pub fn entry_point() {
None
};
let context = StreamingInputContext {
let mut context = StreamingInputContext {
platform,
is_streaming: Arc::clone(&is_streaming),
frame_interval: Duration::from_secs_f32(1.0 / refresh_rate_hint),
xr_instance: xr_instance.clone(),
xr_session: xr_session.clone().into_any_graphics(),
hands_context: Arc::clone(&hands_context),
face_context,
history_view_sender: history_view_sender.clone(),
reference_space: Arc::clone(&reference_space),
views_history: Arc::clone(&views_history),
last_ipd: 0.0,
last_hand_positions: [Vec3::ZERO; 2],
};
let is_streaming = Arc::clone(&is_streaming);
let (sender, reference_space_receiver) = mpsc::channel();
reference_space_sender = Some(sender);
streaming_input_thread = Some(thread::spawn(move || {
let mut state = StreamingInputState::default();
let mut deadline = Instant::now();
while context.is_streaming.value() {
show_err(update_streaming_input(&context, &mut state));
let frame_interval = Duration::from_secs_f32(1.0 / refresh_rate_hint);
deadline += context.frame_interval / 3;
while is_streaming.value() {
update_streaming_input(&mut context);
if let Ok(reference_space) = reference_space_receiver.try_recv() {
context.reference_space = reference_space;
}
deadline += frame_interval / 3;
thread::sleep(deadline.saturating_duration_since(Instant::now()));
}
}));
@@ -801,8 +812,16 @@ pub fn entry_point() {
(vsync_time, ptr::null_mut())
};
while let Ok(views) = history_view_receiver.try_recv() {
if views_history.len() > 360 {
views_history.pop_front();
}
views_history.push_back(views);
}
let mut history_views = None;
for history_frame in &*views_history.lock() {
for history_frame in &views_history {
if history_frame.timestamp == timestamp {
history_views = Some(history_frame.views.clone());
}
@@ -821,7 +840,7 @@ pub fn entry_point() {
);
if !hardware_buffer.is_null() {
if let Some(now) = xr_runtime_now(&xr_instance, platform) {
if let Some(now) = xr_runtime_now(&xr_instance) {
alvr_client_core::report_submit(timestamp, vsync_time.saturating_sub(now));
}
}
@@ -836,7 +855,7 @@ pub fn entry_point() {
.locate_views(
xr::ViewConfigurationType::PRIMARY_STEREO,
frame_state.predicted_display_time,
&reference_space.read(),
&reference_space,
)
.unwrap()
.1;
@@ -872,7 +891,7 @@ pub fn entry_point() {
to_xr_time(display_time),
xr::EnvironmentBlendMode::OPAQUE,
&[&xr::CompositionLayerProjection::new()
.space(&reference_space.read())
.space(&reference_space)
.views(&[
xr::CompositionLayerProjectionView::new()
.pose(views[0].pose)
@@ -904,7 +923,7 @@ pub fn entry_point() {
}
#[allow(unused)]
fn xr_runtime_now(xr_instance: &xr::Instance, platform: Platform) -> Option<Duration> {
fn xr_runtime_now(xr_instance: &xr::Instance) -> Option<Duration> {
let time_nanos = xr_instance.now().ok()?.as_nanos();
(time_nanos > 0).then(|| Duration::from_nanos(time_nanos as _))