From 9dd9fed1fe3d2044ed4b0bf0efd629c1bd3e3808 Mon Sep 17 00:00:00 2001 From: zarik5 Date: Sat, 29 May 2021 23:18:50 +0200 Subject: [PATCH] OpenXR support --- Cargo.toml | 42 ++ crates/bevy_internal/Cargo.toml | 2 + crates/bevy_internal/src/default_plugins.rs | 6 + crates/bevy_internal/src/lib.rs | 12 + crates/bevy_openxr/Cargo.toml | 34 + crates/bevy_openxr/lifecycle graphs.md | 17 + crates/bevy_openxr/src/conversion.rs | 15 + crates/bevy_openxr/src/interaction/mod.rs | 364 +++++++++++ .../bevy_openxr/src/interaction/tracking.rs | 358 +++++++++++ crates/bevy_openxr/src/lib.rs | 590 ++++++++++++++++++ crates/bevy_openxr/src/presentation.rs | 200 ++++++ crates/bevy_render/Cargo.toml | 1 + crates/bevy_webxr/Cargo.toml | 60 ++ crates/bevy_webxr/src/lib.rs | 99 +++ crates/bevy_xr/Cargo.toml | 29 + crates/bevy_xr/src/interaction.rs | 396 ++++++++++++ crates/bevy_xr/src/lib.rs | 74 +++ crates/bevy_xr/src/presentation.rs | 33 + examples/README.md | 12 + examples/xr/vr_cubes.rs | 127 ++++ 20 files changed, 2471 insertions(+) create mode 100644 crates/bevy_openxr/Cargo.toml create mode 100644 crates/bevy_openxr/lifecycle graphs.md create mode 100644 crates/bevy_openxr/src/conversion.rs create mode 100644 crates/bevy_openxr/src/interaction/mod.rs create mode 100644 crates/bevy_openxr/src/interaction/tracking.rs create mode 100644 crates/bevy_openxr/src/lib.rs create mode 100644 crates/bevy_openxr/src/presentation.rs create mode 100644 crates/bevy_webxr/Cargo.toml create mode 100644 crates/bevy_webxr/src/lib.rs create mode 100644 crates/bevy_xr/Cargo.toml create mode 100644 crates/bevy_xr/src/interaction.rs create mode 100644 crates/bevy_xr/src/lib.rs create mode 100644 crates/bevy_xr/src/presentation.rs create mode 100644 examples/xr/vr_cubes.rs diff --git a/Cargo.toml b/Cargo.toml index bdae6ba4401ed..cef951983783e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,8 +36,10 @@ default = [ "hdr", "mp3", "x11", + "bevy_openxr", # todo: remove ] + # Force dynamic linking, which improves iterative compile times dynamic = ["bevy_dylib"] @@ -50,6 +52,8 @@ render = [ "bevy_internal/bevy_ui", ] +xr = ["bevy_internal/bevy_xr"] + # Optional bevy crates bevy_audio = ["bevy_internal/bevy_audio"] bevy_dynamic_plugin = ["bevy_internal/bevy_dynamic_plugin"] @@ -57,6 +61,7 @@ bevy_gilrs = ["bevy_internal/bevy_gilrs"] bevy_gltf = ["bevy_internal/bevy_gltf"] bevy_wgpu = ["bevy_internal/bevy_wgpu"] bevy_winit = ["bevy_internal/bevy_winit"] +bevy_openxr = ["bevy_internal/bevy_xr", "bevy_internal/bevy_openxr"] bevy_core_pipeline = ["bevy_internal/bevy_core_pipeline"] bevy_render2 = ["bevy_internal/bevy_render2"] @@ -551,6 +556,18 @@ name = "winit_wasm" path = "examples/wasm/winit_wasm.rs" required-features = ["bevy_winit"] +# XR +[[example]] +name = "vr_cubes" +path = "examples/xr/vr_cubes.rs" +required-features = ["bevy_openxr"] + +[[example]] +name = "vr_cubes_android" +path = "examples/xr/vr_cubes.rs" +required-features = ["bevy_openxr"] +crate-type = ["cdylib"] + # Android [[example]] crate-type = ["cdylib"] @@ -563,5 +580,30 @@ assets = "assets" res = "assets/android-res" icon = "@mipmap/ic_launcher" build_targets = ["aarch64-linux-android", "armv7-linux-androideabi"] +runtime_libs = "libs" + +[package.metadata.android.sdk] min_sdk_version = 16 target_sdk_version = 29 + +[package.metadata.android.application] +icon = "@mipmap/ic_launcher" + +[[package.metadata.android.application.meta_data]] +name = "com.samsung.android.vr.application.mode" +value = "vr_only" + +[package.metadata.android.application.activity] +theme = 
"@android:style/Theme.Black.NoTitleBar.Fullscreen" +config_changes = "density|keyboard|keyboardHidden|navigation|orientation|screenLayout|screenSize|uiMode" +launch_mode = "singleTask" +orientation = "landscape" +resizeable_activity = "false" + +[[package.metadata.android.application.activity.intent_filter]] +actions = ["android.intent.action.MAIN"] +categories = [ + "com.oculus.intent.category.VR", + "android.intent.category.LAUNCHER", + "android.intent.category.INFO", +] diff --git a/crates/bevy_internal/Cargo.toml b/crates/bevy_internal/Cargo.toml index f2df1808dcaea..f9d25c027c3af 100644 --- a/crates/bevy_internal/Cargo.toml +++ b/crates/bevy_internal/Cargo.toml @@ -81,6 +81,8 @@ bevy_ui = { path = "../bevy_ui", optional = true, version = "0.5.0" } bevy_wgpu = { path = "../bevy_wgpu", optional = true, version = "0.5.0" } bevy_winit = { path = "../bevy_winit", optional = true, version = "0.5.0" } bevy_gilrs = { path = "../bevy_gilrs", optional = true, version = "0.5.0" } +bevy_xr = { path = "../bevy_xr", optional = true, version = "0.5.0" } +bevy_openxr = { path = "../bevy_openxr", optional = true, version = "0.5.0" } [target.'cfg(target_os = "android")'.dependencies] diff --git a/crates/bevy_internal/src/default_plugins.rs b/crates/bevy_internal/src/default_plugins.rs index 0a97da009ac62..cc54fdf8a1047 100644 --- a/crates/bevy_internal/src/default_plugins.rs +++ b/crates/bevy_internal/src/default_plugins.rs @@ -12,6 +12,8 @@ use bevy_gilrs::GilrsPlugin; use bevy_gltf::GltfPlugin; use bevy_input::InputPlugin; use bevy_log::LogPlugin; +#[cfg(feature = "bevy_openxr")] +use bevy_openxr::OpenXrPlugin; #[cfg(feature = "bevy_pbr")] use bevy_pbr::PbrPlugin; #[cfg(feature = "bevy_render")] @@ -29,6 +31,8 @@ use bevy_wgpu::WgpuPlugin; use bevy_window::WindowPlugin; #[cfg(feature = "bevy_winit")] use bevy_winit::WinitPlugin; +#[cfg(feature = "bevy_xr")] +use bevy_xr::XrPlugin; /// This plugin group will add all the default plugins: /// * [`LogPlugin`] @@ -48,6 +52,8 @@ use bevy_winit::WinitPlugin; /// * [`GilrsPlugin`] - with feature `bevy_gilrs` /// * [`GltfPlugin`] - with feature `bevy_gltf` /// * [`WinitPlugin`] - with feature `bevy_winit` +/// * [`XrPlugin`] - with feature `bevy_xr` +/// * [`OpenXrPlugin`] - with feature `bevy_openxr` /// * [`WgpuPlugin`] - with feature `bevy_wgpu` pub struct DefaultPlugins; diff --git a/crates/bevy_internal/src/lib.rs b/crates/bevy_internal/src/lib.rs index 932971b6f1046..c9be002184d93 100644 --- a/crates/bevy_internal/src/lib.rs +++ b/crates/bevy_internal/src/lib.rs @@ -164,6 +164,18 @@ pub mod wgpu { pub use bevy_wgpu::*; } +#[cfg(feature = "bevy_xr")] +pub mod xr { + //! Common interface for XR backends + pub use bevy_xr::*; +} + +#[cfg(feature = "bevy_openxr")] +pub mod openxr { + //! 
OpenXR backend + pub use bevy_openxr::*; +} + #[cfg(feature = "bevy_dynamic_plugin")] pub mod dynamic_plugin { pub use bevy_dynamic_plugin::*; diff --git a/crates/bevy_openxr/Cargo.toml b/crates/bevy_openxr/Cargo.toml new file mode 100644 index 0000000000000..866b391c9c0c7 --- /dev/null +++ b/crates/bevy_openxr/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "bevy_openxr" +version = "0.5.0" +edition = "2018" +authors = [ + "Bevy Contributors ", + "Carter Anderson ", +] +description = "OpenXR presentation and input backend for Bevy Engine" +homepage = "https://bevyengine.org" +repository = "https://github.com/bevyengine/bevy" +license = "MIT" +keywords = ["bevy"] + +[dependencies] +# bevy +bevy_app = { path = "../bevy_app", version = "0.5.0" } +bevy_ecs = { path = "../bevy_ecs", version = "0.5.0" } +bevy_log = { path = "../bevy_log", version = "0.5.0" } +bevy_math = { path = "../bevy_math", version = "0.5.0" } +bevy_utils = { path = "../bevy_utils", version = "0.5.0" } +bevy_xr = { path = "../bevy_xr", version = "0.5.0" } + +# other +openxr = { git = "https://github.com/Ralith/openxrs" } +serde = "1" +ash = "0.33" +wgpu = "0.11" +wgpu-hal = { version = "0.11", features = ["vulkan"] } +thiserror = "1.0" +parking_lot = "0.11" + +[target.'cfg(target_os = "android")'.dependencies] +ndk-glue = "0.4.0" diff --git a/crates/bevy_openxr/lifecycle graphs.md b/crates/bevy_openxr/lifecycle graphs.md new file mode 100644 index 0000000000000..3b9afc5f9c45f --- /dev/null +++ b/crates/bevy_openxr/lifecycle graphs.md @@ -0,0 +1,17 @@ +# Lifecycle + +## Android + +```mermaid +graph +start(( )) -- Startup --> Idle -- Resume --> Running -- Pause --> Idle -- Exit --> start +``` + +## OpenXR + +```mermaid +graph +start(( )) -- Startup --> WaitingForDevice -- SessionCreated --> Idle +Idle -- Resume --> running["Running (Hidden/Visible/Focused)"] -- Pause --> Idle +Idle -- SessionEnd --> WaitingForDevice -- Exit --> start +``` diff --git a/crates/bevy_openxr/src/conversion.rs b/crates/bevy_openxr/src/conversion.rs new file mode 100644 index 0000000000000..4d506459853a1 --- /dev/null +++ b/crates/bevy_openxr/src/conversion.rs @@ -0,0 +1,15 @@ +use bevy_math::{Quat, Vec3}; +use bevy_utils::Duration; +use openxr as xr; + +pub fn from_duration(duration: Duration) -> xr::Duration { + xr::Duration::from_nanos(duration.as_nanos() as _) +} + +pub fn to_vec3(v: xr::Vector3f) -> Vec3 { + Vec3::new(v.x, v.y, v.z) +} + +pub fn to_quat(q: xr::Quaternionf) -> Quat { + Quat::from_xyzw(q.x, q.y, q.z, q.w) +} diff --git a/crates/bevy_openxr/src/interaction/mod.rs b/crates/bevy_openxr/src/interaction/mod.rs new file mode 100644 index 0000000000000..569b35251345b --- /dev/null +++ b/crates/bevy_openxr/src/interaction/mod.rs @@ -0,0 +1,364 @@ +mod tracking; + +use bevy_math::Vec2; +pub use tracking::*; + +use crate::{conversion::from_duration, OpenXrSession}; +use bevy_app::{Events, ManualEventReader}; +use bevy_xr::{ + XrActionSet, XrActionState, XrActionType, XrButtonState, XrHandType, XrProfileDescriptor, + XrVibrationEvent, XrVibrationEventType, +}; +use openxr as xr; +use parking_lot::Mutex; +use std::{collections::HashMap, sync::Arc}; + +// Profiles +pub const KHR_PROFILE: &str = "/interaction_profiles/khr/simple_controller"; +pub const DAYDREAM_PROFILE: &str = "/interaction_profiles/google/daydream_controller"; +pub const VIVE_PROFILE: &str = "/interaction_profiles/htc/vive_controller"; +pub const VIVE_PRO_PROFILE: &str = "/interaction_profiles/htc/vive_pro"; +pub const WMR_PROFILE: &str = 
"/interaction_profiles/microsoft/motion_controller"; +pub const XBOX_PROFILE: &str = "/interaction_profiles/microsoft/xbox_controller"; +pub const GO_PROFILE: &str = "/interaction_profiles/oculus/go_controller"; +pub const OCULUS_TOUCH_PROFILE: &str = "/interaction_profiles/oculus/touch_controller"; +pub const VALVE_INDEX_PROFILE: &str = "/interaction_profiles/valve/index_controller"; + +fn hand_str(hand_type: XrHandType) -> &'static str { + match hand_type { + XrHandType::Left => "left", + XrHandType::Right => "right", + } +} + +struct ButtonActions { + touch: xr::Action, + click: xr::Action, + value: xr::Action, +} + +pub(crate) struct InteractionContext { + // Every time `session.sync_action` is called, the result of `locate_space` can change. In case + // of concurrent use, this becomes unpredictable. Use a Mutex on the `action_set` to allow + // proper synchronization. (NB: synchronization is not ensured: the lock must be held until all + // `locate_space` calls have been performed) + pub action_set: Arc>, + button_actions: HashMap, + binary_actions: HashMap>, + scalar_actions: HashMap>, + vec_2d_actions: HashMap, xr::Action)>, + grip_actions: HashMap>, + target_ray_actions: HashMap>, + vibration_actions: HashMap>, +} + +impl InteractionContext { + pub fn new(instance: &xr::Instance, bindings: &[XrProfileDescriptor]) -> Self { + let action_set = instance + .create_action_set("bevy_bindings", "bevy bindings", 0) + .unwrap(); + + let mut button_actions = HashMap::new(); + for desc in bindings { + for (action_desc, _) in &desc.bindings { + if matches!(action_desc.action_type, XrActionType::Button { .. }) { + button_actions + .entry(action_desc.name.clone()) + .or_insert_with(|| { + let touch_name = format!("{}_touch", action_desc.name); + let click_name = format!("{}_click", action_desc.name); + let value_name = format!("{}_value", action_desc.name); + ButtonActions { + touch: action_set + .create_action(&touch_name, &touch_name, &[]) + .unwrap(), + click: action_set + .create_action(&click_name, &click_name, &[]) + .unwrap(), + value: action_set + .create_action(&value_name, &value_name, &[]) + .unwrap(), + } + }); + } + } + } + + let mut binary_actions = HashMap::new(); + for desc in bindings { + for (action_desc, _) in &desc.bindings { + if action_desc.action_type == XrActionType::Binary { + binary_actions + .entry(action_desc.name.clone()) + .or_insert_with(|| { + action_set + .create_action(&action_desc.name, &action_desc.name, &[]) + .unwrap() + }); + } + } + } + + let mut scalar_actions = HashMap::new(); + for desc in bindings { + for (action_desc, _) in &desc.bindings { + if action_desc.action_type == XrActionType::Scalar { + scalar_actions + .entry(action_desc.name.clone()) + .or_insert_with(|| { + action_set + .create_action(&action_desc.name, &action_desc.name, &[]) + .unwrap() + }); + } + } + } + + let mut vec_2d_actions = HashMap::new(); + for desc in bindings { + for (action_desc, _) in &desc.bindings { + if action_desc.action_type == XrActionType::Vec2D { + vec_2d_actions + .entry(action_desc.name.clone()) + .or_insert_with(|| { + let name_x = format!("{}_x", action_desc.name); + let name_y = format!("{}_y", action_desc.name); + ( + action_set.create_action(&name_x, &name_x, &[]).unwrap(), + action_set.create_action(&name_y, &name_y, &[]).unwrap(), + ) + }); + } + } + } + + let grip_actions = [XrHandType::Left, XrHandType::Right] + .iter() + .map(|hand| { + let name = format!("{}_grip", hand_str(*hand)); + (*hand, action_set.create_action(&name, &name, &[]).unwrap()) + }) 
+ .collect::>(); + + let target_ray_actions = [XrHandType::Left, XrHandType::Right] + .iter() + .map(|hand| { + let name = format!("{}_target_ray", hand_str(*hand)); + (*hand, action_set.create_action(&name, &name, &[]).unwrap()) + }) + .collect::>(); + + let vibration_actions = [XrHandType::Left, XrHandType::Right] + .iter() + .map(|hand| { + let name = format!("{}_vibration", hand_str(*hand)); + (*hand, action_set.create_action(&name, &name, &[]).unwrap()) + }) + .collect::>(); + + for desc in bindings { + let mut bindings = vec![]; + + for (action_desc, path_string) in &desc.bindings { + let path = instance.string_to_path(path_string).unwrap(); + + match action_desc.action_type { + XrActionType::Button { touch } => { + let actions = button_actions.get(&action_desc.name).unwrap(); + + if touch { + bindings.push(xr::Binding::new( + &actions.touch, + instance + .string_to_path(&format!("{}/touch", path_string)) + .unwrap(), + )); + } + + // Note: `click` and `value` components are inferred and automatically + // polyfilled by the runtimes. The runtime may use a 0/1 value using the + // click path or infer the click using the value path and a hysteresis + // threshold. + bindings.push(xr::Binding::new(&actions.click, path)); + bindings.push(xr::Binding::new(&actions.value, path)); + } + XrActionType::Binary => { + let action = binary_actions.get(&action_desc.name).unwrap(); + bindings.push(xr::Binding::new(action, path)) + } + XrActionType::Scalar => { + let action = scalar_actions.get(&action_desc.name).unwrap(); + bindings.push(xr::Binding::new(action, path)) + } + XrActionType::Vec2D => { + let (action_x, action_y) = vec_2d_actions.get(&action_desc.name).unwrap(); + + bindings.push(xr::Binding::new( + action_x, + instance + .string_to_path(&format!("{}/x", path_string)) + .unwrap(), + )); + bindings.push(xr::Binding::new( + action_y, + instance + .string_to_path(&format!("{}/y", path_string)) + .unwrap(), + )); + } + } + } + + if desc.tracked { + for hand in [XrHandType::Left, XrHandType::Right] { + let path_prefix = format!("/user/hand/{}/input", hand_str(hand)); + + let action = grip_actions.get(&hand).unwrap(); + let path = format!("{}/grip/pose", path_prefix); + bindings.push(xr::Binding::new( + action, + instance.string_to_path(&path).unwrap(), + )); + + let action = target_ray_actions.get(&hand).unwrap(); + let path = format!("{}/aim/pose", path_prefix); + bindings.push(xr::Binding::new( + action, + instance.string_to_path(&path).unwrap(), + )); + } + } + + if desc.has_haptics { + for hand in [XrHandType::Left, XrHandType::Right] { + let action = vibration_actions.get(&hand).unwrap(); + let path = format!("/user/hand/{}/output/haptic", hand_str(hand)); + bindings.push(xr::Binding::new( + action, + instance.string_to_path(&path).unwrap(), + )); + } + } + + let profile_path = instance.string_to_path(&desc.profile).unwrap(); + + // Ignore error for unsupported profiles. 
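For illustration, a minimal sketch of the kind of `XrProfileDescriptor` an app could feed into `InteractionContext::new` above for the Oculus Touch profile. The `XrActionDescriptor` type name and the exact field types are assumptions inferred from how the descriptors are consumed in this module (`desc.profile`, `desc.bindings`, `desc.tracked`, `desc.has_haptics`):

```rust
use bevy_openxr::OCULUS_TOUCH_PROFILE;
use bevy_xr::{XrActionDescriptor, XrActionType, XrProfileDescriptor};

// Sketch of an app-side binding declaration. `XrActionDescriptor` and the field types
// are assumptions; only the field names used by `InteractionContext::new` are shown.
fn oculus_touch_profile() -> XrProfileDescriptor {
    XrProfileDescriptor {
        profile: OCULUS_TOUCH_PROFILE.into(),
        bindings: vec![
            (
                XrActionDescriptor {
                    name: "trigger".into(),
                    // `touch: true` makes the loop above also bind the ".../touch" component.
                    action_type: XrActionType::Button { touch: true },
                },
                "/user/hand/right/input/trigger".into(),
            ),
            (
                XrActionDescriptor {
                    name: "thumbstick".into(),
                    // Vec2D actions are split into ".../x" and ".../y" bindings above.
                    action_type: XrActionType::Vec2D,
                },
                "/user/hand/right/input/thumbstick".into(),
            ),
        ],
        tracked: true,
        has_haptics: true,
    }
}
```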
+ instance + .suggest_interaction_profile_bindings(profile_path, &bindings) + .ok(); + } + + InteractionContext { + action_set: Arc::new(Mutex::new(action_set)), + button_actions, + binary_actions, + scalar_actions, + vec_2d_actions, + grip_actions, + target_ray_actions, + vibration_actions, + } + } +} + +pub(crate) fn handle_input( + context: &InteractionContext, + session: &OpenXrSession, + action_set: &mut XrActionSet, +) { + // NB: hold the lock + let action_set_backend = &*context.action_set.lock(); + + session.sync_actions(&[action_set_backend.into()]).unwrap(); + + let mut states = HashMap::new(); + + for (name, actions) in &context.button_actions { + let touched = actions + .touch + .state(session, xr::Path::NULL) + .unwrap() + .current_state; + let pressed = actions + .click + .state(session, xr::Path::NULL) + .unwrap() + .current_state; + let value = actions + .value + .state(session, xr::Path::NULL) + .unwrap() + .current_state; + + let state = if pressed { + XrButtonState::Pressed + } else if touched { + XrButtonState::Touched + } else { + XrButtonState::Default + }; + + states.insert(name.clone(), XrActionState::Button { state, value }); + } + + for (name, action) in &context.binary_actions { + let value = action.state(session, xr::Path::NULL).unwrap().current_state; + states.insert(name.clone(), XrActionState::Binary(value)); + } + + for (name, action) in &context.scalar_actions { + let value = action.state(session, xr::Path::NULL).unwrap().current_state; + states.insert(name.clone(), XrActionState::Scalar(value)); + } + + for (name, (action1, action2)) in &context.vec_2d_actions { + let value1 = action1 + .state(session, xr::Path::NULL) + .unwrap() + .current_state; + let value2 = action2 + .state(session, xr::Path::NULL) + .unwrap() + .current_state; + states.insert( + name.clone(), + XrActionState::Vec2D(Vec2::new(value1, value2)), + ); + } + + action_set.set(states); +} + +pub(crate) fn handle_output( + context: &InteractionContext, + session: &OpenXrSession, + vibration_event_reader: &mut ManualEventReader, + vibration_events: &mut Events, +) { + for event in vibration_event_reader.iter(vibration_events) { + let action = context.vibration_actions.get(&event.hand); + if let Some(action) = action { + match &event.command { + XrVibrationEventType::Apply { + duration, + frequency, + amplitude, + } => { + let haptic_vibration = xr::HapticVibration::new() + .duration(from_duration(*duration)) + .frequency(*frequency) + .amplitude(*amplitude); + + action + .apply_feedback(session, xr::Path::NULL, &haptic_vibration) + .unwrap(); + } + XrVibrationEventType::Stop => { + action.stop_feedback(session, xr::Path::NULL).unwrap() + } + } + } + } + + vibration_events.update(); +} diff --git a/crates/bevy_openxr/src/interaction/tracking.rs b/crates/bevy_openxr/src/interaction/tracking.rs new file mode 100644 index 0000000000000..0ac99dfb074bd --- /dev/null +++ b/crates/bevy_openxr/src/interaction/tracking.rs @@ -0,0 +1,358 @@ +use crate::{ + conversion::{to_quat, to_vec3}, + InteractionContext, OpenXrSession, +}; +use bevy_math::Vec3; +use bevy_xr::{ + interaction::implementation::XrTrackingSourceBackend, XrHandType, XrJointPose, XrPose, + XrReferenceSpaceType, XrRigidTransform, +}; +use openxr as xr; +use parking_lot::{Mutex, RwLock}; +use std::sync::Arc; + +pub fn openxr_pose_to_rigid_transform(pose: xr::Posef) -> XrRigidTransform { + XrRigidTransform { + position: to_vec3(pose.position), + orientation: to_quat(pose.orientation), + } +} + +/// Usage: `prediction_time` must be the same 
time used to obtain `pose`. +pub fn openxr_pose_to_corrected_rigid_transform( + pose: xr::Posef, + reference: &OpenXrTrackingReference, + prediction_time: xr::Time, +) -> XrRigidTransform { + let transform = openxr_pose_to_rigid_transform(pose); + + if reference.change_time.as_nanos() > prediction_time.as_nanos() { + reference.previous_pose_offset * transform + } else { + transform + } +} + +pub fn predict_pose( + space: &xr::Space, + reference: &OpenXrTrackingReference, + prediction_time: xr::Time, +) -> Option { + let (location, velocity) = space.relate(&reference.space, prediction_time).ok()?; + if !location.location_flags.contains( + xr::SpaceLocationFlags::ORIENTATION_VALID | xr::SpaceLocationFlags::POSITION_VALID, + ) { + return None; + } + + let linear_velocity = velocity + .velocity_flags + .contains(xr::SpaceVelocityFlags::LINEAR_VALID) + .then(|| to_vec3(velocity.linear_velocity)); + let angular_velocity = velocity + .velocity_flags + .contains(xr::SpaceVelocityFlags::ANGULAR_VALID) + .then(|| to_vec3(velocity.angular_velocity)); + + Some(XrPose { + transform: openxr_pose_to_corrected_rigid_transform( + location.pose, + reference, + prediction_time, + ), + linear_velocity, + angular_velocity, + emulated_position: location + .location_flags + .contains(xr::SpaceLocationFlags::POSITION_TRACKED), + }) +} + +pub fn predict_hand_skeleton_pose( + hand_tracker: &xr::HandTracker, + reference: &OpenXrTrackingReference, + prediction_time: xr::Time, +) -> Option> { + let (poses, velocities) = reference + .space + .relate_hand_joints(hand_tracker, prediction_time) + .ok() + .flatten()?; + + Some( + poses + .iter() + .zip(velocities.iter()) + .skip(1) // exclude palm joint + .map(|(location, velocity)| { + let linear_velocity = velocity + .velocity_flags + .contains(xr::SpaceVelocityFlags::LINEAR_VALID) + .then(|| to_vec3(velocity.linear_velocity)); + let angular_velocity = velocity + .velocity_flags + .contains(xr::SpaceVelocityFlags::ANGULAR_VALID) + .then(|| to_vec3(velocity.angular_velocity)); + + XrJointPose { + pose: XrPose { + transform: openxr_pose_to_corrected_rigid_transform( + location.pose, + reference, + prediction_time, + ), + linear_velocity, + angular_velocity, + emulated_position: location + .location_flags + .contains(xr::SpaceLocationFlags::POSITION_TRACKED), + }, + radius: location.radius, + } + }) + .collect(), + ) +} + +pub struct OpenXrTrackingReference { + pub space_type: xr::ReferenceSpaceType, + pub space: xr::Space, + pub change_time: xr::Time, + pub previous_pose_offset: XrRigidTransform, +} + +pub struct OpenXrTrackingContext { + pub reference: RwLock, + pub grip_spaces: [xr::Space; 2], + pub target_ray_spaces: [xr::Space; 2], + pub hand_trackers: Option<[xr::HandTracker; 2]>, +} + +impl OpenXrTrackingContext { + pub(crate) fn new( + instance: &xr::Instance, + system: xr::SystemId, + interaction_context: &InteractionContext, + session: OpenXrSession, + ) -> Self { + // Select the most immersive type available + let reference = [ + xr::ReferenceSpaceType::STAGE, + xr::ReferenceSpaceType::LOCAL, + xr::ReferenceSpaceType::VIEW, + ] + .iter() + .cloned() + .find_map(|space_type| { + let space = session + .create_reference_space(space_type, xr::Posef::IDENTITY) + .ok()?; + + Some(OpenXrTrackingReference { + space_type, + space, + change_time: xr::Time::from_nanos(0), + previous_pose_offset: XrRigidTransform::default(), + }) + }) + .unwrap(); + + let grip_spaces = [ + interaction_context + .grip_actions + .get(&XrHandType::Left) + .unwrap() + 
.create_space((*session).clone(), xr::Path::NULL, xr::Posef::IDENTITY) + .unwrap(), + interaction_context + .grip_actions + .get(&XrHandType::Right) + .unwrap() + .create_space((*session).clone(), xr::Path::NULL, xr::Posef::IDENTITY) + .unwrap(), + ]; + let target_ray_spaces = [ + interaction_context + .target_ray_actions + .get(&XrHandType::Left) + .unwrap() + .create_space((*session).clone(), xr::Path::NULL, xr::Posef::IDENTITY) + .unwrap(), + interaction_context + .target_ray_actions + .get(&XrHandType::Right) + .unwrap() + .create_space((*session).clone(), xr::Path::NULL, xr::Posef::IDENTITY) + .unwrap(), + ]; + let hand_trackers = instance.supports_hand_tracking(system).unwrap().then(|| { + [ + session.create_hand_tracker(xr::Hand::LEFT).unwrap(), + session.create_hand_tracker(xr::Hand::RIGHT).unwrap(), + ] + }); + + Self { + reference: RwLock::new(reference), + grip_spaces, + target_ray_spaces, + hand_trackers, + } + } +} + +pub(crate) struct TrackingSource { + pub view_type: xr::ViewConfigurationType, + pub action_set: Arc>, + pub session: OpenXrSession, + pub context: Arc, + pub next_vsync_time: Arc>, +} + +impl XrTrackingSourceBackend for TrackingSource { + fn reference_space_type(&self) -> XrReferenceSpaceType { + match self.context.reference.read().space_type { + xr::ReferenceSpaceType::VIEW => XrReferenceSpaceType::Viewer, + xr::ReferenceSpaceType::LOCAL => XrReferenceSpaceType::Local, + xr::ReferenceSpaceType::STAGE => XrReferenceSpaceType::Stage, + _ => unreachable!(), + } + } + + fn set_reference_space_type(&self, mode: XrReferenceSpaceType) -> bool { + let reference_type = match mode { + XrReferenceSpaceType::Viewer => xr::ReferenceSpaceType::VIEW, + XrReferenceSpaceType::Local => xr::ReferenceSpaceType::LOCAL, + XrReferenceSpaceType::Stage => xr::ReferenceSpaceType::STAGE, + }; + if let Ok(space) = self + .session + .create_reference_space(reference_type, xr::Posef::IDENTITY) + { + let reference = &mut self.context.reference.write(); + reference.space_type = reference_type; + reference.space = space; + + true + } else { + false + } + } + + fn bounds_geometry(&self) -> Option> { + let rect = self + .session + .reference_space_bounds_rect(self.context.reference.read().space_type) + .ok() + .flatten()?; + let half_width = rect.width / 2_f32; + let half_height = rect.height / 2_f32; + + Some(vec![ + Vec3::new(-half_width, 0_f32, -half_height), + Vec3::new(half_width, 0_f32, -half_height), + Vec3::new(half_width, 0_f32, half_height), + Vec3::new(-half_width, 0_f32, half_height), + ]) + } + + fn views_poses(&self) -> Vec { + // NB: hold the lock + let action_set = &*self.action_set.lock(); + + self.session.sync_actions(&[action_set.into()]).unwrap(); + let reference = &self.context.reference.read(); + let display_time = *self.next_vsync_time.read(); + + let (flags, views) = self + .session + .locate_views(self.view_type, display_time, &reference.space) + .unwrap(); + + views + .into_iter() + .map(|view| XrPose { + transform: openxr_pose_to_corrected_rigid_transform( + view.pose, + reference, + display_time, + ), + linear_velocity: None, + angular_velocity: None, + emulated_position: flags.contains(xr::ViewStateFlags::POSITION_TRACKED), + }) + .collect() + } + + fn hands_pose(&self) -> [Option; 2] { + // NB: hold the lock + let action_set = &*self.action_set.lock(); + + self.session.sync_actions(&[action_set.into()]).unwrap(); + let reference = &self.context.reference.read(); + let display_time = *self.next_vsync_time.read(); + + [ + predict_pose(&self.context.grip_spaces[0], 
reference, display_time), + predict_pose(&self.context.grip_spaces[1], reference, display_time), + ] + } + + fn hands_skeleton_pose(&self) -> [Option>; 2] { + if let Some(hand_trackers) = &self.context.hand_trackers { + // NB: hold the lock + let action_set = &*self.action_set.lock(); + + self.session.sync_actions(&[action_set.into()]).unwrap(); + let display_time = *self.next_vsync_time.read(); + let reference = &self.context.reference.read(); + + [ + predict_hand_skeleton_pose(&hand_trackers[0], reference, display_time), + predict_hand_skeleton_pose(&hand_trackers[1], reference, display_time), + ] + } else { + [None, None] + } + } + + fn hands_target_ray(&self) -> [Option; 2] { + // NB: hold the lock + let action_set = &*self.action_set.lock(); + + self.session.sync_actions(&[action_set.into()]).unwrap(); + let display_time = *self.next_vsync_time.read(); + let reference = &self.context.reference.read(); + + [ + predict_pose(&self.context.target_ray_spaces[0], reference, display_time), + predict_pose(&self.context.target_ray_spaces[1], reference, display_time), + ] + } + + fn viewer_target_ray(&self) -> XrPose { + let poses = self.views_poses(); + let poses_count = poses.len() as f32; + + // fixme: this is wrong when views point outwards (Pimax) + // todo: quaternion averaging + let orientation = poses[0].transform.orientation; + + let position = poses + .iter() + .map(|pose| pose.transform.position) + .reduce(std::ops::Add::add) + .unwrap() + / poses_count; + + XrPose { + transform: XrRigidTransform { + position, + orientation, + }, + linear_velocity: None, + angular_velocity: None, + emulated_position: poses[0].emulated_position, + } + } +} diff --git a/crates/bevy_openxr/src/lib.rs b/crates/bevy_openxr/src/lib.rs new file mode 100644 index 0000000000000..9a9b1d7e8db54 --- /dev/null +++ b/crates/bevy_openxr/src/lib.rs @@ -0,0 +1,590 @@ +mod conversion; +mod interaction; +mod presentation; + +use ash::vk::Handle; +pub use interaction::*; + +use bevy_app::{App, AppExit, CoreStage, Events, ManualEventReader, Plugin}; +use bevy_ecs::schedule::Schedule; +use bevy_xr::{ + presentation::{XrEnvironmentBlendMode, XrGraphicsContext, XrInteractionMode}, + XrActionSet, XrProfiles, XrSessionMode, XrSystem, XrTrackingSource, XrVibrationEvent, + XrVisibilityState, +}; +use openxr::{self as xr, sys}; +use parking_lot::RwLock; +use presentation::GraphicsContextHandles; +use serde::{Deserialize, Serialize}; +use std::{error::Error, ops::Deref, sync::Arc, thread, time::Duration}; + +// The form-factor is selected at plugin-creation-time and cannot be changed anymore for the entire +// lifetime of the app. This will restrict which XrSessionMode can be selected. +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Serialize, Deserialize)] +pub enum OpenXrFormFactor { + HeadMountedDisplay, + Handheld, +} + +enum SessionBackend { + Vulkan(xr::Session), + #[cfg(windows)] + D3D11(xr::Session), +} + +enum FrameStream { + Vulkan(xr::FrameStream), + #[cfg(windows)] + D3D11(xr::FrameStream), +} + +#[derive(Clone)] +pub struct OpenXrSession { + inner: Option>, + _wgpu_device: Arc, +} + +impl Deref for OpenXrSession { + type Target = xr::Session; + + fn deref(&self) -> &Self::Target { + self.inner.as_ref().unwrap() + } +} + +impl Drop for OpenXrSession { + fn drop(&mut self) { + // Drop OpenXR session before wgpu::Device. 
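+        // Taking `inner` here drops the `xr::Session` while `_wgpu_device` is still
+        // alive, making the required session-before-device drop order explicit.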
+ self.inner.take(); + } +} + +#[derive(Debug)] +pub enum OpenXrError { + Loader(xr::LoadError), + InstanceCreation(sys::Result), + UnsupportedFormFactor, + UnavailableFormFactor, + GraphicsCreation(Box), +} + +fn selected_extensions(entry: &xr::Entry) -> xr::ExtensionSet { + let available = entry.enumerate_extensions().unwrap(); + + let mut exts = xr::ExtensionSet::default(); + // Complete list: https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#extension-appendices-list + exts.khr_composition_layer_depth = available.khr_composition_layer_depth; + // todo: set depth layer + exts.khr_vulkan_enable = available.khr_vulkan_enable; + exts.khr_vulkan_enable2 = available.khr_vulkan_enable2; + if cfg!(debug_assertions) { + exts.ext_debug_utils = available.ext_debug_utils; + } + exts.ext_eye_gaze_interaction = available.ext_eye_gaze_interaction; + // todo: implement eye tracking + exts.ext_hand_tracking = available.ext_hand_tracking; + exts.ext_hp_mixed_reality_controller = available.ext_hp_mixed_reality_controller; + exts.ext_performance_settings = available.ext_performance_settings; + // todo: implement performance API + exts.ext_samsung_odyssey_controller = available.ext_samsung_odyssey_controller; + exts.ext_thermal_query = available.ext_thermal_query; + // todo: implement thermal API + exts.fb_color_space = available.fb_color_space; + // todo: implement color space API + exts.fb_display_refresh_rate = available.fb_display_refresh_rate; + // todo: implement refresh rate API + exts.htc_vive_cosmos_controller_interaction = available.htc_vive_cosmos_controller_interaction; + exts.huawei_controller_interaction = available.huawei_controller_interaction; + exts.msft_hand_interaction = available.msft_hand_interaction; + // exts.msft_scene_unserstanding = available.msft_scene_unserstanding -> not available in openxrs + // todo: implement scene understanding API + // exts.msft_scene_unserstanding_serialization = available.msft_scene_unserstanding_serialization -> not available in openxrs + // todo: implement scene serialization + exts.msft_secondary_view_configuration = available.msft_secondary_view_configuration; + // todo: implement secondary view. This requires integration with winit. 
+ exts.msft_spatial_anchor = available.msft_spatial_anchor; + // todo: implement spatial anchors API + exts.varjo_quad_views = available.varjo_quad_views; + + #[cfg(target_os = "android")] + { + exts.khr_android_create_instance = available.khr_android_create_instance; + exts.khr_android_thread_settings = available.khr_android_thread_settings; + // todo: set APPLICATION_MAIN and RENDER_MAIN threads + } + #[cfg(windows)] + { + exts.khr_d3d11_enable = available.khr_d3d11_enable; + } + + exts +} + +pub struct OpenXrContext { + instance: xr::Instance, + form_factor: xr::FormFactor, + system: xr::SystemId, + // Note: the lifecycle of graphics handles is managed by wgpu objects + graphics_handles: GraphicsContextHandles, + wgpu_device: Arc, + graphics_context: Option, +} + +impl OpenXrContext { + fn new(form_factor: OpenXrFormFactor) -> Result { + let entry = xr::Entry::load().map_err(OpenXrError::Loader)?; + + #[cfg(target_os = "android")] + entry.initialize_android_loader(); + + let extensions = selected_extensions(&entry); + + let instance = entry + .create_instance( + &xr::ApplicationInfo { + application_name: "Bevy App", + application_version: 0, + engine_name: "Bevy Engine", + engine_version: 0, + }, + &extensions, + &[], // todo: add debug layer + ) + .map_err(OpenXrError::InstanceCreation)?; + + let form_factor = match form_factor { + OpenXrFormFactor::HeadMountedDisplay => xr::FormFactor::HEAD_MOUNTED_DISPLAY, + OpenXrFormFactor::Handheld => xr::FormFactor::HEAD_MOUNTED_DISPLAY, + }; + + let system = instance.system(form_factor).map_err(|e| match e { + sys::Result::ERROR_FORM_FACTOR_UNSUPPORTED => OpenXrError::UnsupportedFormFactor, + sys::Result::ERROR_FORM_FACTOR_UNAVAILABLE => OpenXrError::UnavailableFormFactor, + e => panic!("{}", e), // should never happen + })?; + + let (graphics_handles, graphics_context) = + presentation::create_graphics_context(&instance, system) + .map_err(OpenXrError::GraphicsCreation)?; + + Ok(Self { + instance, + form_factor, + system, + graphics_handles, + wgpu_device: graphics_context.device.clone(), + graphics_context: Some(graphics_context), + }) + } +} + +fn get_system_info( + instance: &xr::Instance, + system: xr::SystemId, + mode: XrSessionMode, +) -> Option<(xr::ViewConfigurationType, xr::EnvironmentBlendMode)> { + let view_type = match mode { + XrSessionMode::ImmersiveVR | XrSessionMode::ImmersiveAR => { + if instance.exts().varjo_quad_views.is_some() { + xr::ViewConfigurationType::PRIMARY_QUAD_VARJO + } else { + xr::ViewConfigurationType::PRIMARY_STEREO + } + } + XrSessionMode::InlineVR | XrSessionMode::InlineAR => { + xr::ViewConfigurationType::PRIMARY_MONO + } + }; + + let blend_modes = instance + .enumerate_environment_blend_modes(system, view_type) + .unwrap(); + + let blend_mode = match mode { + XrSessionMode::ImmersiveVR | XrSessionMode::InlineVR => blend_modes + .into_iter() + .find(|b| *b == xr::EnvironmentBlendMode::OPAQUE)?, + XrSessionMode::ImmersiveAR | XrSessionMode::InlineAR => blend_modes + .iter() + .cloned() + .find(|b| *b == xr::EnvironmentBlendMode::ALPHA_BLEND) + .or_else(|| { + blend_modes + .into_iter() + .find(|b| *b == xr::EnvironmentBlendMode::ADDITIVE) + })?, + }; + + Some((view_type, blend_mode)) +} + +#[derive(Default)] +pub struct OpenXrPlugin; + +impl Plugin for OpenXrPlugin { + fn build(&self, app: &mut App) { + if !app.world.contains_resource::() { + let context = + OpenXrContext::new(OpenXrFormFactor::HeadMountedDisplay).unwrap_or_else(|_| { + match OpenXrContext::new(OpenXrFormFactor::Handheld) { + Ok(context) => 
context, + // In case OpenXR is suported, there should be always at least one supported + // form factor. If "Handheld" is unsupported, "HeadMountedDisplay" is + // supported (but in this case unavailable). + Err( + OpenXrError::UnsupportedFormFactor | OpenXrError::UnavailableFormFactor, + ) => panic!( + "OpenXR: No available form factors. Consider manually handling {}", + "the creation of the OpenXrContext resource." + ), + Err(e) => panic!( + "OpenXR: Failed to create OpenXrContext: {:?}\n{} {}", + e, + "Consider manually handling", + "the creation of the OpenXrContext resource." + ), + } + }); + app.world.insert_resource(context); + } + + let mut context = app.world.get_resource_mut::().unwrap(); + let graphics_context = context.graphics_context.take().unwrap(); + + app.insert_resource::(graphics_context) + .set_runner(runner); + } +} + +// Currently, only the session loop is implemented. If the session is destroyed or fails to +// create, the app will exit. +// todo: Implement the instance loop when the the lifecycle API is implemented. +fn runner(mut app: App) { + let ctx = app.world.remove_resource::().unwrap(); + + app.world.insert_resource(ctx.instance.clone()); + + let mut app_exit_event_reader = ManualEventReader::default(); + + let interaction_mode = if ctx.form_factor == xr::FormFactor::HEAD_MOUNTED_DISPLAY { + XrInteractionMode::WorldSpace + } else { + XrInteractionMode::ScreenSpace + }; + app.world.insert_resource(interaction_mode); + + // Find the available session modes + let available_session_modes = [ + XrSessionMode::ImmersiveVR, + XrSessionMode::ImmersiveAR, + XrSessionMode::InlineVR, + XrSessionMode::InlineAR, + ] + .iter() + .filter_map(|mode| get_system_info(&ctx.instance, ctx.system, *mode).map(|_| *mode)) + .collect(); + + app.world + .insert_resource(XrSystem::new(available_session_modes)); + + // Run the startup systems. The user can verify which session modes are supported and choose + // one. + app.schedule + .get_stage_mut::(&CoreStage::Startup) + .unwrap() + .run_once(&mut app.world); + + if app_exit_event_reader + .iter(&app.world.get_resource_mut::>().unwrap()) + .next_back() + .is_some() + { + return; + } + + let xr_system = app.world.get_resource::().unwrap(); + + let mode = xr_system.selected_session_mode(); + let bindings = xr_system.action_set(); + + let interaction_context = InteractionContext::new(&ctx.instance, bindings); + + // Remove XrSystem. The user cannot make any more changes to the session mode. + // todo: when the lifecycle API is implemented, allow the user to change the session mode at any + // moment. 
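For illustration, a minimal sketch of a startup system that picks a session mode during the single startup pass run above. `available_session_modes()` and `request_session_mode()` are hypothetical method names; this diff only shows `selected_session_mode()` and `action_set()` being read back afterwards:

```rust
use bevy_ecs::prelude::*;
use bevy_xr::{XrSessionMode, XrSystem};

// Startup system: runs in the single `CoreStage::Startup` pass above, i.e. before the
// OpenXR session is created and before the `XrSystem` resource is removed again.
fn choose_session_mode(mut xr_system: ResMut<XrSystem>) {
    // Hypothetical query/setter names; the real API is defined in bevy_xr's lib.rs,
    // which is outside this hunk.
    let mode = if xr_system
        .available_session_modes()
        .contains(&XrSessionMode::ImmersiveVR)
    {
        XrSessionMode::ImmersiveVR
    } else {
        XrSessionMode::InlineVR
    };

    xr_system.request_session_mode(mode);
}
```

In Bevy 0.5 style this would be registered with `app.add_startup_system(choose_session_mode.system())`.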
+ app.world.remove_resource::(); + + let (view_type, blend_mode) = get_system_info(&ctx.instance, ctx.system, mode).unwrap(); + + let environment_blend_mode = match blend_mode { + xr::EnvironmentBlendMode::OPAQUE => XrEnvironmentBlendMode::Opaque, + xr::EnvironmentBlendMode::ALPHA_BLEND => XrEnvironmentBlendMode::AlphaBlend, + xr::EnvironmentBlendMode::ADDITIVE => XrEnvironmentBlendMode::Additive, + _ => unreachable!(), + }; + app.world.insert_resource(environment_blend_mode); + + let (session, graphics_session, mut frame_waiter, mut frame_stream) = match ctx.graphics_handles + { + GraphicsContextHandles::Vulkan { + instance, + physical_device, + device, + queue_family_index, + queue_index, + } => { + let (session, frame_waiter, frame_stream) = unsafe { + ctx.instance + .create_session( + ctx.system, + &xr::vulkan::SessionCreateInfo { + instance: instance.handle().as_raw() as *const _, + physical_device: physical_device.as_raw() as *const _, + device: device.handle().as_raw() as *const _, + queue_family_index, + queue_index, + }, + ) + .unwrap() + }; + ( + session.clone().into_any_graphics(), + SessionBackend::Vulkan(session), + frame_waiter, + FrameStream::Vulkan(frame_stream), + ) + } + #[cfg(windows)] + GraphicsContextHandles::D3D11 { device } => { + let (session, frame_waiter, frame_stream) = self + .instance + .create_session( + self.system_id, + &xr::d3d::SessionCreateInfo { + device: device as _, + }, + ) + .unwrap(); + ( + backend.clone().into_any_graphics(), + SessionBackend::D3D11(session), + frame_waiter, + FrameStream::D3D11(frame_stream), + ) + } + }; + + let session = OpenXrSession { + inner: Some(session), + _wgpu_device: ctx.wgpu_device, + }; + + // The user can have a limited access to the OpenXR session using OpenXrSession, which is + // clonable but safe because of the _wgpu_device internal handle. 
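Haptic output, by contrast, does not need the session at all: game systems send the `XrVibrationEvent`s that `handle_output` in the interaction module drains each frame. A minimal sketch, assuming `Events<XrVibrationEvent>` is registered (presumably by `XrPlugin`); the system name and when it runs are hypothetical, while the event fields match the ones matched on above:

```rust
use bevy_app::EventWriter;
use bevy_utils::Duration;
use bevy_xr::{XrHandType, XrVibrationEvent, XrVibrationEventType};

// Pulse the right controller; `handle_output` above turns this into an
// `xr::HapticVibration` and applies it through the per-hand vibration action.
fn rumble_on_grab(mut vibration_events: EventWriter<XrVibrationEvent>) {
    vibration_events.send(XrVibrationEvent {
        hand: XrHandType::Right,
        command: XrVibrationEventType::Apply {
            duration: Duration::from_millis(50),
            frequency: 160.0, // Hz
            amplitude: 0.8,   // 0.0..=1.0
        },
    });
}
```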
+ app.world.insert_resource(session.clone()); + + session + .attach_action_sets(&[&interaction_context.action_set.lock()]) + .unwrap(); + + let tracking_context = Arc::new(OpenXrTrackingContext::new( + &ctx.instance, + ctx.system, + &interaction_context, + session.clone(), + )); + + let next_vsync_time = Arc::new(RwLock::new(xr::Time::from_nanos(0))); + + let tracking_source = TrackingSource { + view_type, + action_set: interaction_context.action_set.clone(), + session: session.clone(), + context: tracking_context.clone(), + next_vsync_time: next_vsync_time.clone(), + }; + + app.world.insert_resource(tracking_context.clone()); + app.world + .insert_resource(XrTrackingSource::new(Box::new(tracking_source))); + + // todo: use these views limits and recommendations + let views = ctx + .instance + .enumerate_view_configuration_views(ctx.system, view_type) + .unwrap(); + + let mut vibration_event_reader = ManualEventReader::default(); + + let mut event_storage = xr::EventDataBuffer::new(); + + let mut running = false; + 'session_loop: loop { + while let Some(event) = ctx.instance.poll_event(&mut event_storage).unwrap() { + match event { + xr::Event::EventsLost(e) => { + bevy_log::error!("OpenXR: Lost {} events", e.lost_event_count()); + } + xr::Event::InstanceLossPending(_) => { + bevy_log::info!("OpenXR: Shutting down for runtime request"); + break 'session_loop; + } + xr::Event::SessionStateChanged(e) => { + bevy_log::debug!("entered state {:?}", e.state()); + + match e.state() { + xr::SessionState::UNKNOWN | xr::SessionState::IDLE => (), + xr::SessionState::READY => { + session.begin(view_type).unwrap(); + running = true; + } + xr::SessionState::SYNCHRONIZED => { + app.world.insert_resource(XrVisibilityState::Hidden) + } + xr::SessionState::VISIBLE => app + .world + .insert_resource(XrVisibilityState::VisibleUnfocused), + xr::SessionState::FOCUSED => { + app.world.insert_resource(XrVisibilityState::VisibleFocused) + } + xr::SessionState::STOPPING => { + session.end().unwrap(); + running = false; + } + xr::SessionState::EXITING | xr::SessionState::LOSS_PENDING => { + break 'session_loop; + } + _ => unreachable!(), + } + } + xr::Event::ReferenceSpaceChangePending(e) => { + let reference_ref = &mut tracking_context.reference.write(); + + reference_ref.space_type = e.reference_space_type(); + reference_ref.change_time = e.change_time(); + reference_ref.previous_pose_offset = + openxr_pose_to_rigid_transform(e.pose_in_previous_space()) + } + xr::Event::PerfSettingsEXT(e) => { + let sub_domain = match e.sub_domain() { + xr::PerfSettingsSubDomainEXT::COMPOSITING => "compositing", + xr::PerfSettingsSubDomainEXT::RENDERING => "rendering", + xr::PerfSettingsSubDomainEXT::THERMAL => "thermal", + _ => unreachable!(), + }; + let domain = match e.domain() { + xr::PerfSettingsDomainEXT::CPU => "CPU", + xr::PerfSettingsDomainEXT::GPU => "GPU", + _ => unreachable!(), + }; + let from = match e.from_level() { + xr::PerfSettingsNotificationLevelEXT::NORMAL => "normal", + xr::PerfSettingsNotificationLevelEXT::WARNING => "warning", + xr::PerfSettingsNotificationLevelEXT::IMPAIRED => "critical", + _ => unreachable!(), + }; + let to = match e.to_level() { + xr::PerfSettingsNotificationLevelEXT::NORMAL => "normal", + xr::PerfSettingsNotificationLevelEXT::WARNING => "warning", + xr::PerfSettingsNotificationLevelEXT::IMPAIRED => "critical", + _ => unreachable!(), + }; + bevy_log::warn!( + "OpenXR: The {} state of the {} went from {} to {}", + sub_domain, + domain, + from, + to + ); + + // todo: react to performance 
notifications + } + xr::Event::VisibilityMaskChangedKHR(_) => (), // todo: update visibility mask + xr::Event::InteractionProfileChanged(_) => { + let left_hand = ctx + .instance + .path_to_string( + session + .current_interaction_profile( + ctx.instance.string_to_path("/user/hand/left").unwrap(), + ) + .unwrap(), + ) + .ok(); + let right_hand = ctx + .instance + .path_to_string( + session + .current_interaction_profile( + ctx.instance.string_to_path("/user/hand/right").unwrap(), + ) + .unwrap(), + ) + .ok(); + + app.world.insert_resource(XrProfiles { + left_hand, + right_hand, + }) + } + xr::Event::MainSessionVisibilityChangedEXTX(_) => (), // unused + xr::Event::DisplayRefreshRateChangedFB(_) => (), // shouldn't be needed + _ => bevy_log::debug!("OpenXR: Unhandled event"), + } + } + + if !running { + thread::sleep(Duration::from_millis(200)); + continue; + } + + let frame_state = frame_waiter.wait().unwrap(); + + *next_vsync_time.write() = frame_state.predicted_display_time; + + { + let world_cell = app.world.cell(); + handle_input( + &interaction_context, + &session, + &mut world_cell.get_resource_mut::().unwrap(), + ); + } + + match &mut frame_stream { + FrameStream::Vulkan(frame_stream) => frame_stream.begin().unwrap(), + #[cfg(windows)] + FrameStream::D3D11(frame_stream) => frame_stream.begin().unwrap(), + } + + app.update(); + + // match &mut frame_stream { + // FrameStream::Vulkan(frame_stream) => frame_stream + // .end(frame_state.predicted_display_time, blend_mode, todo!()) + // .unwrap(), + // #[cfg(windows)] + // FrameStream::D3D11(frame_stream) => frame_stream + // .end(frame_state.predicted_display_time, blend_mode, todo!()) + // .unwrap(), + // } + + handle_output( + &interaction_context, + &session, + &mut vibration_event_reader, + &mut app + .world + .get_resource_mut::>() + .unwrap(), + ); + + if app_exit_event_reader + .iter(&app.world.get_resource_mut::>().unwrap()) + .next_back() + .is_some() + { + session.request_exit().unwrap(); + } + } +} diff --git a/crates/bevy_openxr/src/presentation.rs b/crates/bevy_openxr/src/presentation.rs new file mode 100644 index 0000000000000..3535125a8082b --- /dev/null +++ b/crates/bevy_openxr/src/presentation.rs @@ -0,0 +1,200 @@ +use ash::vk::{self, Handle}; +use bevy_xr::presentation::XrGraphicsContext; +use openxr as xr; +use std::{error::Error, ffi::CString, sync::Arc}; +use wgpu_hal as hal; +#[cfg(windows)] +use winapi::um::d3d11::ID3D11Device; + +#[derive(Clone)] +pub enum GraphicsContextHandles { + Vulkan { + instance: ash::Instance, + physical_device: vk::PhysicalDevice, + device: ash::Device, + queue_family_index: u32, + queue_index: u32, + }, + #[cfg(windows)] + D3D11 { device: *const ID3D11Device }, +} + +#[derive(Debug, thiserror::Error)] +#[error("Error creating HAL adapter")] +pub struct AdapterError; + +pub fn create_graphics_context( + instance: &xr::Instance, + system: xr::SystemId, +) -> Result<(GraphicsContextHandles, XrGraphicsContext), Box> { + let device_descriptor = wgpu::DeviceDescriptor::default(); + + if instance.exts().khr_vulkan_enable2.is_some() { + let vk_entry = unsafe { ash::Entry::new().map_err(Box::new)? }; + + // Vulkan 1.0 constrained by Oculus Go support. 
+ // todo: multiview support will require Vulkan 1.1 or specific extensions + let vk_version = vk::make_api_version(1, 0, 0, 0); + + // todo: check requirements + let _requirements = instance + .graphics_requirements::(system) + .unwrap(); + + let vk_app_info = vk::ApplicationInfo::builder() + .application_version(0) + .engine_version(0) + .api_version(vk_version); + + let mut flags = hal::InstanceFlags::empty(); + if cfg!(debug_assertions) { + flags |= hal::InstanceFlags::VALIDATION; + flags |= hal::InstanceFlags::DEBUG; + } + + let instance_extensions = ::Instance::required_extensions( + &vk_entry, vk_version, flags, + ) + .map_err(Box::new)?; + let instance_extensions_ptrs = instance_extensions + .iter() + .map(|x| x.as_ptr()) + .collect::>(); + + let vk_instance = unsafe { + let vk_instance = instance + .create_vulkan_instance( + system, + std::mem::transmute(vk_entry.static_fn().get_instance_proc_addr), + &vk::InstanceCreateInfo::builder() + .application_info(&vk_app_info) + .enabled_extension_names(&instance_extensions_ptrs) + as *const _ as *const _, + ) + .map_err(Box::new)? + .map_err(|e| Box::new(vk::Result::from_raw(e)))?; + + ash::Instance::load( + vk_entry.static_fn(), + vk::Instance::from_raw(vk_instance as _), + ) + }; + let hal_instance = unsafe { + ::Instance::from_raw( + vk_entry.clone(), + vk_instance.clone(), + vk_version, + instance_extensions, + flags, + Box::new(instance.clone()), + ) + .map_err(Box::new)? + }; + + let vk_physical_device = vk::PhysicalDevice::from_raw( + instance + .vulkan_graphics_device(system, vk_instance.handle().as_raw() as _) + .map_err(Box::new)? as _, + ); + let hal_exposed_adapter = hal_instance + .expose_adapter(vk_physical_device) + .ok_or_else(|| Box::new(AdapterError))?; + + let queue_family_index = unsafe { + vk_instance + .get_physical_device_queue_family_properties(vk_physical_device) + .into_iter() + .enumerate() + .find_map(|(queue_family_index, info)| { + if info.queue_flags.contains(vk::QueueFlags::GRAPHICS) { + Some(queue_family_index as u32) + } else { + None + } + }) + .unwrap() + }; + let queue_index = 0; + + let device_extensions = hal_exposed_adapter + .adapter + .required_device_extensions(device_descriptor.features); + let device_extensions_ptrs = device_extensions + .iter() + .map(|x| x.as_ptr()) + .collect::>(); + + let mut physical_features = hal_exposed_adapter + .adapter + .physical_device_features(&device_extensions, device_descriptor.features); + + let family_info = vk::DeviceQueueCreateInfo::builder() + .queue_family_index(queue_family_index) + .queue_priorities(&[1.0]) + .build(); + let family_infos = [family_info]; + + let vk_device = { + let info = vk::DeviceCreateInfo::builder() + .queue_create_infos(&family_infos) + .enabled_extension_names(&device_extensions_ptrs); + let info = physical_features.add_to_device_create_builder(info).build(); + + unsafe { + let vk_device = instance + .create_vulkan_device( + system, + std::mem::transmute(vk_entry.static_fn().get_instance_proc_addr), + vk_physical_device.as_raw() as _, + &info as *const _ as *const _, + ) + .map_err(Box::new)? + .map_err(|e| Box::new(vk::Result::from_raw(e)))?; + + ash::Device::load(vk_instance.fp_v1_0(), vk::Device::from_raw(vk_device as _)) + } + }; + let hal_device = unsafe { + hal_exposed_adapter + .adapter + .device_from_raw( + vk_device.clone(), + &device_extensions, + queue_family_index, + queue_index, + ) + .map_err(Box::new)? 
+ }; + + // let wgpu_instance = unsafe { wgpu::Instance::from_hal::(hal_instance) }; + // let wgpu_adapter = unsafe { wgpu_instance.adapter_from_hal(hal_exposed_adapter) }; + // let (wgpu_device, wgpu_queue) = unsafe { + // wgpu_adapter + // .device_from_hal(hal_device, &device_descriptor, None) + // .map_err(Box::new)? + // }; + + Ok(( + GraphicsContextHandles::Vulkan { + instance: vk_instance, + physical_device: vk_physical_device, + device: vk_device, + queue_family_index, + queue_index, + }, + todo!(), + // XrGraphicsContext { + // instance: wgpu_instance, + // device: Arc::new(wgpu_device), + // queue: wgpu_queue, + // }, + )) + } else { + #[cfg(windows)] + if instance.exts().khr_d3d11_enable { + todo!() + } + + Err(Box::new(xr::sys::Result::ERROR_EXTENSION_NOT_PRESENT)) + } +} diff --git a/crates/bevy_render/Cargo.toml b/crates/bevy_render/Cargo.toml index c3ac5c6167dc0..5257ba83650b4 100644 --- a/crates/bevy_render/Cargo.toml +++ b/crates/bevy_render/Cargo.toml @@ -20,6 +20,7 @@ bevy_reflect = { path = "../bevy_reflect", version = "0.5.0", features = ["bevy" bevy_transform = { path = "../bevy_transform", version = "0.5.0" } bevy_window = { path = "../bevy_window", version = "0.5.0" } bevy_utils = { path = "../bevy_utils", version = "0.5.0" } +bevy_xr = { path = "../bevy_xr", version = "0.5.0" } # rendering image = { version = "0.23.12", default-features = false } diff --git a/crates/bevy_webxr/Cargo.toml b/crates/bevy_webxr/Cargo.toml new file mode 100644 index 0000000000000..3f71d8ba0b801 --- /dev/null +++ b/crates/bevy_webxr/Cargo.toml @@ -0,0 +1,60 @@ +[package] +name = "bevy_webxr" +version = "0.5.0" +edition = "2018" +authors = [ + "Bevy Contributors ", + "Carter Anderson ", +] +description = "OpenXR presentation and input backend for Bevy Engine" +homepage = "https://bevyengine.org" +repository = "https://github.com/bevyengine/bevy" +license = "MIT" +keywords = ["bevy"] + +[dependencies] +# bevy +bevy_app = { path = "../bevy_app", version = "0.5.0" } +bevy_ecs = { path = "../bevy_ecs", version = "0.5.0" } +bevy_log = { path = "../bevy_log", version = "0.5.0" } +bevy_math = { path = "../bevy_math", version = "0.5.0" } +bevy_utils = { path = "../bevy_utils", version = "0.5.0" } +bevy_xr = { path = "../bevy_xr", version = "0.5.0" } + +# other +web-sys = { version = "0.3", features = [ + "Window", + "Navigator", + "Xr", + "XrBoundedReferenceSpace", + "XrEye", + "XrFrame", + "XrHandedness", + "XrInputSource", + "XrInputSourceArray", + "XrInputSourceEvent", + "XrInputSourceEventInit", + "XrInputSourcesChangeEvent", + "XrInputSourcesChangeEventInit", + "XrPose", + "XrReferenceSpace", + "XrReferenceSpaceEvent", + "XrReferenceSpaceEventInit", + "XrReferenceSpaceType", + "XrRenderState", + "XrRenderStateInit", + "XrRigidTransform", + "XrSession", + "XrSessionEvent", + "XrSessionEventInit", + "XrSessionInit", + "XrSessionMode", + "XrSpace", + "XrTargetRayMode", + "XrView", + "XrViewerPose", + "XrViewport", + "XrVisibilityState", + "XrWebGlLayer", + "XrWebGlLayerInit", +] } diff --git a/crates/bevy_webxr/src/lib.rs b/crates/bevy_webxr/src/lib.rs new file mode 100644 index 0000000000000..b67021efe33b4 --- /dev/null +++ b/crates/bevy_webxr/src/lib.rs @@ -0,0 +1,99 @@ +// use bevy_app::{AppBuilder, Plugin}; +// use bevy_utils::Duration; +// use bevy_xr::XrMode; +// use std::{cell::RefCell, rc::Rc, thread}; +// use web_sys::{Closure, XrSession, XrSessionMode}; + +// #[derive(Clone)] +// struct WebXrContext { +// pub session: XrSession, +// pub mode: XrMode, +// } + +// struct WebXrPlugin; 
+ +// impl Plugin for WebXrPlugin { +// fn build(&self, app: &mut AppBuilder) { +// let context = if let Some(context) = app.world().get_resource::() { +// context.clone() +// } else { +// let system = web_sys::window().unwrap().navigator().xr(); + +// // todo: this needs a better solution +// let vr_supported = Rc::new(RefCell::new(None)); +// system +// .is_session_supported(XrSessionMode::ImmersiveVr) +// .then(Closure::wrap(Box::new({ +// let vr_supported = Rc::clone(&vr_supported); +// move |res| { +// *vr_supported.borrow_mut() = Some(res); +// } +// }))); +// let ar_supported = Rc::new(RefCell::new(None)); +// system +// .is_session_supported(XrSessionMode::ImmersiveAr) +// .then(Closure::wrap(Box::new({ +// let ar_supported = Rc::clone(&ar_supported); +// move |res| { +// *ar_supported.borrow_mut() = Some(res); +// } +// }))); +// let vr_supported = loop { +// if let Some(res) = vr_supported.borrow_mut().take() { +// break res; +// } else { +// thread::sleep(Duration::from_millis(10)); +// } +// }; +// let ar_supported = loop { +// if let Some(res) = vr_supported.borrow_mut().take() { +// break res; +// } else { +// thread::sleep(Duration::from_millis(10)); +// } +// }; + +// let mode = app.world().get_resource::(); +// let session_mode = match mode { +// Some(XrMode::ImmersiveVR) | None if vr_supported => XrSessionMode::ImmersiveVr, +// Some(XrMode::ImmersiveAR) if ar_supported => XrSessionMode::ImmersiveAr, +// _ => XrSessionMode::Inline, +// }; + +// let new_mode = match session_mode { +// XrSessionMode::ImmersiveVr => XrMode::ImmersiveVR, +// XrSessionMode::ImmersiveAr => XrMode::ImmersiveAR, +// XrSessionMode::Inline => XrMode::InlineVR, +// }; + +// if let Some(mode) = mode { +// if new_mode != *mode { +// bevy_log::warn!("XrMode has been changed to {:?}", mode); +// } +// } + +// let session = Rc::new(RefCell::new(None)); +// system +// .request_session(session_mode) +// .then(Closure::wrap(Box::new({ +// let session = Rc::clone(&session); +// move |s: XrSession| { +// *session.borrow_mut() = Some(s); +// } +// }))); +// let session = loop { +// if let Some(session) = session.borrow_mut().take() { +// break session; +// } else { +// thread::sleep(Duration::from_millis(10)); +// } +// }; + +// WebXrContext { +// session, +// mode: new_mode, +// }; +// }; +// app.insert_resource(context.mode).insert_resource(context); +// } +// } diff --git a/crates/bevy_xr/Cargo.toml b/crates/bevy_xr/Cargo.toml new file mode 100644 index 0000000000000..72fd192e58ac8 --- /dev/null +++ b/crates/bevy_xr/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "bevy_xr" +version = "0.5.0" +edition = "2018" +authors = [ + "Bevy Contributors ", + "Carter Anderson ", +] +description = "XR presentation and input interface for Bevy Engine" +homepage = "https://bevyengine.org" +repository = "https://github.com/bevyengine/bevy" +license = "MIT" +keywords = ["bevy"] + +[dependencies] +# bevy +bevy_app = { path = "../bevy_app", version = "0.5.0" } +bevy_core = { path = "../bevy_core", version = "0.5.0" } +bevy_ecs = { path = "../bevy_ecs", version = "0.5.0" } +bevy_math = { path = "../bevy_math", version = "0.5.0" } +bevy_reflect = { path = "../bevy_reflect", version = "0.5.0", features = [ + "bevy", +] } +bevy_utils = { path = "../bevy_utils", version = "0.5.0" } + +# other +downcast-rs = "1.2" +serde = "1" +wgpu = "0.11" diff --git a/crates/bevy_xr/src/interaction.rs b/crates/bevy_xr/src/interaction.rs new file mode 100644 index 0000000000000..3394b5665089a --- /dev/null +++ b/crates/bevy_xr/src/interaction.rs 
@@ -0,0 +1,396 @@
+use bevy_math::{Mat4, Quat, Vec2, Vec3};
+use bevy_utils::Duration;
+use serde::{Deserialize, Serialize};
+use std::{
+    collections::HashMap,
+    ops::{Deref, Mul},
+};
+
+// Note: indices follow the WebXR convention. OpenXR's palm joint is missing, but it can be
+// retrieved using `XrTrackingSource::hands_pose()`.
+pub const XR_HAND_JOINT_WRIST: usize = 0;
+pub const XR_HAND_JOINT_THUMB_METACARPAL: usize = 1;
+pub const XR_HAND_JOINT_THUMB_PROXIMAL: usize = 2;
+pub const XR_HAND_JOINT_THUMB_DISTAL: usize = 3;
+pub const XR_HAND_JOINT_THUMB_TIP: usize = 4;
+pub const XR_HAND_JOINT_INDEX_METACARPAL: usize = 5;
+pub const XR_HAND_JOINT_INDEX_PROXIMAL: usize = 6;
+pub const XR_HAND_JOINT_INDEX_INTERMEDIATE: usize = 7;
+pub const XR_HAND_JOINT_INDEX_DISTAL: usize = 8;
+pub const XR_HAND_JOINT_INDEX_TIP: usize = 9;
+pub const XR_HAND_JOINT_MIDDLE_METACARPAL: usize = 10;
+pub const XR_HAND_JOINT_MIDDLE_PROXIMAL: usize = 11;
+pub const XR_HAND_JOINT_MIDDLE_INTERMEDIATE: usize = 12;
+pub const XR_HAND_JOINT_MIDDLE_DISTAL: usize = 13;
+pub const XR_HAND_JOINT_MIDDLE_TIP: usize = 14;
+pub const XR_HAND_JOINT_RING_METACARPAL: usize = 15;
+pub const XR_HAND_JOINT_RING_PROXIMAL: usize = 16;
+pub const XR_HAND_JOINT_RING_INTERMEDIATE: usize = 17;
+pub const XR_HAND_JOINT_RING_DISTAL: usize = 18;
+pub const XR_HAND_JOINT_RING_TIP: usize = 19;
+pub const XR_HAND_JOINT_LITTLE_METACARPAL: usize = 20;
+pub const XR_HAND_JOINT_LITTLE_PROXIMAL: usize = 21;
+pub const XR_HAND_JOINT_LITTLE_INTERMEDIATE: usize = 22;
+pub const XR_HAND_JOINT_LITTLE_DISTAL: usize = 23;
+pub const XR_HAND_JOINT_LITTLE_TIP: usize = 24;
+
+// To be verified: in all useful instances, when the orientation is valid, the position is also
+// valid. In case of 3DOF headsets, the position should always be emulated using a neck and arm
+// model. In case of hand tracking, when a joint is estimated, both position and orientation are
+// available.
+#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize)]
+pub struct XrRigidTransform {
+    pub position: Vec3,
+    pub orientation: Quat,
+}
+
+impl Mul for XrRigidTransform {
+    type Output = XrRigidTransform;
+
+    fn mul(self, rhs: Self) -> Self::Output {
+        XrRigidTransform {
+            position: self.position + self.orientation * rhs.position,
+            orientation: self.orientation * rhs.orientation,
+        }
+    }
+}
+
+impl XrRigidTransform {
+    pub fn to_mat4(&self) -> Mat4 {
+        todo!()
+    }
+}
+
+#[derive(Clone, Default, Debug, Serialize, Deserialize)]
+pub struct XrPose {
+    pub transform: XrRigidTransform,
+    pub linear_velocity: Option<Vec3>,
+    pub angular_velocity: Option<Vec3>,
+    pub emulated_position: bool,
+}
+
+impl Deref for XrPose {
+    type Target = XrRigidTransform;
+
+    fn deref(&self) -> &Self::Target {
+        &self.transform
+    }
+}
+
+#[derive(Clone, Default, Debug, Serialize, Deserialize)]
+pub struct XrJointPose {
+    pub pose: XrPose,
+
+    /// Radius of a sphere placed at the center of the joint that roughly touches the skin on both
+    /// sides of the hand.
+    pub radius: f32,
+}
+
+impl Deref for XrJointPose {
+    type Target = XrPose;
+
+    fn deref(&self) -> &Self::Target {
+        &self.pose
+    }
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrReferenceSpaceType {
+    /// The coordinate system (position and orientation) is set as the headset pose at startup or
+    /// after a recenter. This should be used only for experiences where the user is lying down.
+    Viewer,
+
+    /// The coordinate system (position and gravity-aligned orientation) is calculated from the
+    /// headset pose at startup or after a recenter. This is for seated experiences.
+    Local,
+
+    /// The coordinate system (position and orientation) corresponds to the center of a rectangle
+    /// at floor level, with +Y up. This is for standing or room-scale experiences.
+    Stage,
+}
+
+pub mod implementation {
+    use super::XrReferenceSpaceType;
+    use crate::{interaction::XrPose, XrJointPose};
+    use bevy_math::Vec3;
+
+    pub trait XrTrackingSourceBackend: Send + Sync {
+        fn reference_space_type(&self) -> XrReferenceSpaceType;
+        fn set_reference_space_type(&self, reference_space_type: XrReferenceSpaceType) -> bool;
+        fn bounds_geometry(&self) -> Option<Vec<Vec3>>;
+        fn views_poses(&self) -> Vec<XrPose>;
+        fn hands_pose(&self) -> [Option<XrPose>; 2];
+        fn hands_skeleton_pose(&self) -> [Option<Vec<XrJointPose>>; 2];
+        fn hands_target_ray(&self) -> [Option<XrPose>; 2];
+        fn viewer_target_ray(&self) -> XrPose;
+    }
+}
+
+/// Resource used to poll tracking data. Tracking data is obtained "on-demand" to get the best
+/// precision possible. Poses are predicted for the next V-Sync. To obtain poses for an arbitrary
+/// point in time, the `bevy_openxr` backend provides this functionality with `OpenXrTrackingState`.
+pub struct XrTrackingSource {
+    inner: Box<dyn implementation::XrTrackingSourceBackend>,
+}
+
+impl XrTrackingSource {
+    pub fn new(backend: Box<dyn implementation::XrTrackingSourceBackend>) -> Self {
+        Self { inner: backend }
+    }
+
+    pub fn reference_space_type(&self) -> XrReferenceSpaceType {
+        self.inner.reference_space_type()
+    }
+
+    /// Returns true if the tracking mode has been set correctly. If false is returned, the
+    /// tracking mode is not supported and another one must be chosen.
+    pub fn set_reference_space_type(&mut self, reference_space_type: XrReferenceSpaceType) -> bool {
+        self.inner.set_reference_space_type(reference_space_type)
+    }
+
+    pub fn just_reset_reference_space(&mut self) -> bool {
+        todo!()
+    }
+
+    /// Returns a list of points, ordered clockwise, that define the playspace boundary. Only
+    /// available when the reference space is set to `XrReferenceSpaceType::Stage`. The Y component
+    /// is always 0.
+    pub fn bounds_geometry(&self) -> Option<Vec<Vec3>> {
+        self.inner.bounds_geometry()
+    }
+
+    pub fn views_poses(&self) -> Vec<XrPose> {
+        self.inner.views_poses()
+    }
+
+    /// Index 0 corresponds to the left hand, index 1 corresponds to the right hand.
+    pub fn hands_pose(&self) -> [Option<XrPose>; 2] {
+        self.inner.hands_pose()
+    }
+
+    /// Index 0 corresponds to the left hand, index 1 corresponds to the right hand.
+    pub fn hands_skeleton_pose(&self) -> [Option<Vec<XrJointPose>>; 2] {
+        self.inner.hands_skeleton_pose()
+    }
+
+    /// Returns poses that can be used to render a target ray or cursor. The ray is along -Z. The
+    /// behavior is vendor-specific. Index 0 corresponds to the left hand, index 1 corresponds to
+    /// the right hand.
+    pub fn hands_target_ray(&self) -> [Option<XrPose>; 2] {
+        self.inner.hands_target_ray()
+    }
+
+    /// Returns a pose that can be used to render a target ray or cursor. The ray is along -Z. The
+    /// origin is between the eyes for head-mounted displays and the center of the screen for
+    /// handheld devices.
+    pub fn viewer_target_ray(&self) -> XrPose {
+        self.inner.viewer_target_ray()
+    }
+
+    // future extensions:
+    // * eye tracking
+    // * lower face tracking
+    // * AR face tracking
+    // * body/skeletal trackers
+    // * scene understanding (anchors, planes, meshes)
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrHandType {
+    Left,
+    Right,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrButtonState {
+    Default,
+    Touched,
+    Pressed,
+}
+
+impl Default for XrButtonState {
+    fn default() -> Self {
+        Self::Default
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
+pub enum XrActionType {
+    /// Convenience type that groups click, touch and value actions for a single button.
+    /// The last segment of the path (`/click`, `/touch` or `/value`) must be omitted.
+    Button {
+        touch: bool,
+    },
+
+    Binary,
+
+    Scalar,
+
+    /// Convenience type that groups x and y axes for a touchpad or thumbstick action.
+    /// The last segment of the path (`/x` or `/y`) must be omitted.
+    Vec2D,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
+pub enum XrActionState {
+    Button { state: XrButtonState, value: f32 },
+    Binary(bool),
+    Scalar(f32),
+    Vec2D(Vec2),
+}
+
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+pub struct XrActionDescriptor {
+    pub name: String,
+    pub action_type: XrActionType,
+}
+
+/// List of bindings related to a single interaction profile. `tracked` and `has_haptics` can
+/// always be set to false, but if they are set to true and the interaction profile does not
+/// support them, the profile will be disabled completely.
+pub struct XrProfileDescriptor {
+    pub profile: String,
+    pub bindings: Vec<(XrActionDescriptor, String)>,
+    pub tracked: bool,
+    pub has_haptics: bool,
+}
+
+pub struct XrActionSet {
+    current_states: HashMap<String, XrActionState>,
+    previous_states: HashMap<String, XrActionState>,
+}
+
+impl XrActionSet {
+    pub fn state(&self, action: &str) -> Option<XrActionState> {
+        self.current_states.get(action).cloned()
+    }
+
+    pub fn button_state(&self, action: &str) -> XrButtonState {
+        if let Some(XrActionState::Button { state, .. }) = self.current_states.get(action) {
+            *state
+        } else {
+            XrButtonState::Default
+        }
+    }
+
+    pub fn button_touched(&self, action: &str) -> bool {
+        if let Some(XrActionState::Button { state, .. }) = self.current_states.get(action) {
+            *state != XrButtonState::Default
+        } else {
+            false
+        }
+    }
+
+    pub fn button_pressed(&self, action: &str) -> bool {
+        if let Some(XrActionState::Button { state, .. }) = self.current_states.get(action) {
+            *state == XrButtonState::Pressed
+        } else {
+            false
+        }
+    }
+
+    fn button_states(&self, action: &str) -> Option<(XrButtonState, XrButtonState)> {
+        if let (
+            Some(XrActionState::Button {
+                state: current_state,
+                ..
+            }),
+            Some(XrActionState::Button {
+                state: previous_state,
+                ..
+            }),
+        ) = (
+            self.current_states.get(action),
+            self.previous_states.get(action),
+        ) {
+            Some((*current_state, *previous_state))
+        } else {
+            None
+        }
+    }
+
+    pub fn button_just_touched(&self, action: &str) -> bool {
+        self.button_states(action)
+            .map(|(cur, prev)| cur != XrButtonState::Default && prev == XrButtonState::Default)
+            .unwrap_or(false)
+    }
+
+    pub fn button_just_untouched(&self, action: &str) -> bool {
+        self.button_states(action)
+            .map(|(cur, prev)| cur == XrButtonState::Default && prev != XrButtonState::Default)
+            .unwrap_or(false)
+    }
+
+    pub fn button_just_pressed(&self, action: &str) -> bool {
+        self.button_states(action)
+            .map(|(cur, prev)| cur == XrButtonState::Pressed && prev != XrButtonState::Pressed)
+            .unwrap_or(false)
+    }
+
+    pub fn button_just_unpressed(&self, action: &str) -> bool {
+        self.button_states(action)
+            .map(|(cur, prev)| cur != XrButtonState::Pressed && prev == XrButtonState::Pressed)
+            .unwrap_or(false)
+    }
+
+    pub fn binary_value(&self, action: &str) -> bool {
+        if let Some(XrActionState::Binary(value)) = self.current_states.get(action) {
+            *value
+        } else {
+            self.button_pressed(action)
+        }
+    }
+
+    pub fn scalar_value(&self, action: &str) -> f32 {
+        if let Some(XrActionState::Scalar(value) | XrActionState::Button { value, .. }) =
+            self.current_states.get(action)
+        {
+            *value
+        } else {
+            0.0
+        }
+    }
+
+    pub fn vec_2d_value(&self, action: &str) -> Vec2 {
+        if let Some(XrActionState::Vec2D(value)) = self.current_states.get(action) {
+            *value
+        } else {
+            Vec2::ZERO
+        }
+    }
+
+    pub fn set(&mut self, states: HashMap<String, XrActionState>) {
+        self.previous_states = self.current_states.clone();
+        self.current_states = states;
+    }
+
+    pub fn clear(&mut self) {
+        self.current_states.clear();
+        self.previous_states.clear();
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
+pub enum XrVibrationEventType {
+    Apply {
+        duration: Duration,
+        frequency: f32,
+        amplitude: f32,
+    },
+    Stop,
+}
+
+#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
+pub struct XrVibrationEvent {
+    pub hand: XrHandType,
+    pub command: XrVibrationEventType,
+}
+
+/// Active interaction profiles. The format is backend-specific. They can be used to choose the
+/// controller 3D models to display.
+/// Note: in case skeletal hand tracking is active, the profiles still point to controller
+/// profiles. The correct 3D model to display can be decided depending on whether skeletal hand
+/// tracking data is available or not.
+#[derive(Clone, PartialEq, Default, Debug, Serialize, Deserialize)]
+pub struct XrProfiles {
+    pub left_hand: Option<String>,
+    pub right_hand: Option<String>,
+}
diff --git a/crates/bevy_xr/src/lib.rs b/crates/bevy_xr/src/lib.rs
new file mode 100644
index 0000000000000..3bef3e88f4b3a
--- /dev/null
+++ b/crates/bevy_xr/src/lib.rs
@@ -0,0 +1,74 @@
+pub mod interaction;
+pub mod presentation;
+
+pub use interaction::*;
+pub use presentation::XrVisibilityState;
+
+use bevy_app::{App, Plugin};
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub enum XrSessionMode {
+    ImmersiveVR,
+    ImmersiveAR,
+    InlineVR,
+    InlineAR,
+}
+
+pub struct XrSystem {
+    available_session_modes: Vec<XrSessionMode>,
+    session_mode: XrSessionMode,
+    action_set_desc: Vec<XrProfileDescriptor>,
+}
+
+impl XrSystem {
+    pub fn new(available_session_modes: Vec<XrSessionMode>) -> Self {
+        Self {
+            session_mode: available_session_modes[0],
+            available_session_modes,
+            action_set_desc: vec![],
+        }
+    }
+
+    pub fn selected_session_mode(&self) -> XrSessionMode {
+        self.session_mode
+    }
+
+    pub fn available_session_modes(&self) -> Vec<XrSessionMode> {
+        self.available_session_modes.clone()
+    }
+
+    /// In case this method returns false, it may be either because the mode is not supported or
+    /// because it is currently not available.
+    pub fn is_session_mode_supported(&self, mode: XrSessionMode) -> bool {
+        self.available_session_modes.contains(&mode)
+    }
+
+    /// Sets the session mode. Returns false if the mode is unsupported.
+    pub fn request_session_mode(&mut self, mode: XrSessionMode) -> bool {
+        if self.is_session_mode_supported(mode) {
+            self.session_mode = mode;
+
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn set_action_set(&mut self, action_set_desc: Vec<XrProfileDescriptor>) {
+        self.action_set_desc = action_set_desc;
+    }
+
+    pub fn action_set(&self) -> &[XrProfileDescriptor] {
+        &self.action_set_desc
+    }
+}
+
+#[derive(Default)]
+pub struct XrPlugin;
+
+impl Plugin for XrPlugin {
+    fn build(&self, app: &mut App) {
+        app.add_event::<XrVibrationEvent>()
+            .init_resource::<XrProfiles>();
+    }
+}
diff --git a/crates/bevy_xr/src/presentation.rs b/crates/bevy_xr/src/presentation.rs
new file mode 100644
index 0000000000000..d39f451e0c090
--- /dev/null
+++ b/crates/bevy_xr/src/presentation.rs
@@ -0,0 +1,33 @@
+use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrEnvironmentBlendMode {
+    Opaque,
+    AlphaBlend,
+    Additive,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrInteractionMode {
+    ScreenSpace,
+    WorldSpace,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
+pub enum XrVisibilityState {
+    VisibleFocused,
+    VisibleUnfocused,
+    Hidden,
+}
+
+pub struct XrGraphicsContext {
+    pub instance: wgpu::Instance,
+    pub device: Arc<wgpu::Device>,
+    pub queue: wgpu::Queue,
+}
+
+// Trait implemented by XR backends that support display mode.
+pub trait XrPresentationSession: Send + Sync + 'static {
+    fn get_swapchains(&mut self) -> Vec<Vec<u64>>;
+}
diff --git a/examples/README.md b/examples/README.md
index 6d8a563bd8fb6..8f7efd06167db 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -65,6 +65,8 @@ git checkout v0.4.0
   - [WASM](#wasm)
     - [Setup](#setup-2)
    - [Build & Run](#build--run-2)
+  - [XR (Virtual Reality)](#xr-virtual-reality)
+    - [Setup](#setup-3)
 
 # The Bare Minimum
@@ -398,3 +400,13 @@ Example | File | Description
 `assets_wasm` | [`wasm/assets_wasm.rs`](./wasm/assets_wasm.rs) | Demonstrates how to load assets from wasm
 `headless_wasm` | [`wasm/headless_wasm.rs`](./wasm/headless_wasm.rs) | Sets up a schedule runner and continually logs a counter to the browser's console
 `winit_wasm` | [`wasm/winit_wasm.rs`](./wasm/winit_wasm.rs) | Logs user input to the browser's console. Requires the `bevy_winit` features
+
+## XR (Virtual Reality)
+
+### Setup
+
+If you have a wired headset, you need to install and enable an OpenXR runtime. If you have a standalone headset that runs Android, follow the [Android setup](#setup), then download the latest OpenXR SDK compatible with your headset and copy `libopenxr_loader.so` to `examples/libs/arm64-v8a`.
+
+Example | File | Description
+--- | --- | ---
+`vr_cubes` | [`xr/vr_cubes.rs`](./xr/vr_cubes.rs) | Creates floating cubes by pressing the trigger
diff --git a/examples/xr/vr_cubes.rs b/examples/xr/vr_cubes.rs
new file mode 100644
index 0000000000000..8b3d7a2cb232b
--- /dev/null
+++ b/examples/xr/vr_cubes.rs
@@ -0,0 +1,127 @@
+use bevy::{
+    app::AppExit,
+    openxr::{OpenXrPlugin, OCULUS_TOUCH_PROFILE},
+    prelude::*,
+    utils::Duration,
+    xr::{
+        XrActionDescriptor, XrActionSet, XrActionType, XrHandType, XrPlugin, XrProfileDescriptor,
+        XrReferenceSpaceType, XrSessionMode, XrSystem, XrTrackingSource, XrVibrationEvent,
+        XrVibrationEventType,
+    },
+    PipelinedDefaultPlugins,
+};
+
+#[bevy_main]
+fn main() {
+    App::new()
+        .add_plugin(XrPlugin)
+        .add_plugin(OpenXrPlugin)
+        .add_plugins(PipelinedDefaultPlugins)
+        .add_startup_system(startup)
+        .add_system(interaction)
+        .run();
+}
+
+fn startup(mut xr_system: ResMut<XrSystem>, mut app_exit_events: EventWriter<AppExit>) {
+    if xr_system.is_session_mode_supported(XrSessionMode::ImmersiveVR) {
+        xr_system.request_session_mode(XrSessionMode::ImmersiveVR);
+    } else {
+        bevy::log::error!("The XR device does not support immersive VR mode");
+        app_exit_events.send(AppExit);
+    }
+
+    let left_button = XrActionDescriptor {
+        name: "left_button".into(),
+        action_type: XrActionType::Button { touch: false },
+    };
+    let right_button = XrActionDescriptor {
+        name: "right_button".into(),
+        action_type: XrActionType::Button { touch: false },
+    };
+    let left_squeeze = XrActionDescriptor {
+        name: "left_squeeze".into(),
+        action_type: XrActionType::Scalar,
+    };
+    let right_squeeze = XrActionDescriptor {
+        name: "right_squeeze".into(),
+        action_type: XrActionType::Scalar,
+    };
+
+    let oculus_profile = XrProfileDescriptor {
+        profile: OCULUS_TOUCH_PROFILE.into(),
+        bindings: vec![
+            (left_button.clone(), "/user/hand/left/input/trigger".into()),
+            (left_button, "/user/hand/left/input/x".into()),
+            (
+                right_button.clone(),
+                "/user/hand/right/input/trigger".into(),
+            ),
+            (right_button, "/user/hand/right/input/a".into()),
+            (left_squeeze, "/user/hand/left/input/squeeze".into()),
+            (right_squeeze, "/user/hand/right/input/squeeze".into()),
+        ],
+        tracked: true,
+        has_haptics: true,
+    };
+
+    xr_system.set_action_set(vec![oculus_profile]);
+}
+
+fn interaction(
+    action_set: Res<XrActionSet>,
+    mut tracking_source: ResMut<XrTrackingSource>,
+    mut vibration_events: EventWriter<XrVibrationEvent>,
+) {
+    if tracking_source.reference_space_type() != XrReferenceSpaceType::Local {
+        tracking_source.set_reference_space_type(XrReferenceSpaceType::Local);
+    }
+
+    for (hand, button, squeeze) in [
+        (XrHandType::Left, "left_button", "left_squeeze"),
+        (XrHandType::Right, "right_button", "right_squeeze"),
+    ] {
+        if action_set.button_just_pressed(button) {
+            // Short haptic click
+            vibration_events.send(XrVibrationEvent {
+                hand,
+                command: XrVibrationEventType::Apply {
+                    duration: Duration::from_millis(2),
+                    frequency: 3000_f32, // Hz
+                    amplitude: 1_f32,
+                },
+            });
+        } else {
+            let squeeze_value = action_set.scalar_value(squeeze);
+            if squeeze_value > 0.0 {
+                // Low frequency rumble
+                vibration_events.send(XrVibrationEvent {
+                    hand,
+                    command: XrVibrationEventType::Apply {
+                        duration: Duration::from_millis(100),
+                        frequency: 100_f32, // Hz
+                        // haptics intensity depends on the squeeze force
+                        amplitude: squeeze_value,
+                    },
+                });
+            }
+        }
+    }
+
+    let [left_pose, right_pose] = tracking_source.hands_pose();
+    if let Some(pose) = left_pose {
+        let _left_pose = pose.to_mat4();
+    }
+    if let Some(pose) = right_pose {
+        let _right_pose = pose.to_mat4();
+    }
+
+    todo!() // Draw hands
+}
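The example leaves hand rendering as a `todo!()`. A minimal sketch of how that step could be wired up as a separate system, under the assumption that two hand entities carrying an illustrative `HandSide` marker component are spawned at startup (neither the marker nor `draw_hands` is part of this patch); it relies only on `XrTrackingSource::hands_pose()` and the `XrRigidTransform` fields introduced above, and could be registered with `.add_system(draw_hands)` in `main()`:

use bevy::prelude::*;
use bevy::xr::XrTrackingSource;

// Illustrative marker component distinguishing the two pre-spawned hand entities.
#[derive(Clone, Copy, PartialEq, Eq)]
enum HandSide {
    Left,
    Right,
}

// Moves the hand entities to the most recently tracked hand poses.
fn draw_hands(
    tracking_source: Res<XrTrackingSource>,
    mut hands: Query<(&mut Transform, &HandSide)>,
) {
    // Index 0 is the left hand, index 1 is the right hand.
    let [left_pose, right_pose] = tracking_source.hands_pose();
    for (mut transform, side) in hands.iter_mut() {
        let pose = match side {
            HandSide::Left => &left_pose,
            HandSide::Right => &right_pose,
        };
        if let Some(pose) = pose {
            // `XrPose` derefs to `XrRigidTransform`, which stores position and orientation.
            transform.translation = pose.transform.position;
            transform.rotation = pose.transform.orientation;
        }
    }
}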