more webxr support

This commit is contained in:
awtterpip
2024-02-13 16:06:30 -06:00
parent a078d4baa9
commit cea1d58a15
13 changed files with 588 additions and 590 deletions

142
src/actions.rs Normal file
View File

@@ -0,0 +1,142 @@
pub mod oculus_touch;
/// Seals [`ActionType`] so only the value types listed below can implement it.
mod private {
    use crate::types::{Haptic, Pose};
    use bevy::math::Vec2;

    /// Marker trait; implemented exclusively for XR action value types.
    pub trait Sealed {}

    // Generate the impls for every permitted action value type.
    macro_rules! seal {
        ($($ty:ty),* $(,)?) => {
            $(impl Sealed for $ty {})*
        };
    }
    seal!(bool, f32, Vec2, Pose, Haptic);
}
use std::borrow::Cow;
use std::marker::PhantomData;
/// Marker trait for types an XR action can produce.
///
/// Sealed via [`private::Sealed`]; only `bool`, `f32`, `Vec2`, `Pose`, and
/// `Haptic` qualify.
pub trait ActionType: private::Sealed {}
// Blanket impl: every sealed type is automatically an `ActionType`.
impl<T: private::Sealed> ActionType for T {}
/// Common interface of an action path: its full slash-separated path string
/// and a short action name.
pub trait ActionPathTrait {
    /// Value type this path yields (e.g. `bool` for a click input).
    type PathType: ActionType;
    /// Full path, e.g. `"/user/hand/left/input/x/click"`.
    fn path(&self) -> Cow<'_, str>;
    /// Short action identifier, e.g. `"x_click"`.
    fn name(&self) -> Cow<'_, str>;
}
/// A statically-declared action path, parameterized by the value type `T` it
/// produces. Instances are generated as `pub const`s by the `actions!` macro.
pub struct ActionPath<T: ActionType> {
    // Full path, concatenated at compile time via `concat!`.
    path: &'static str,
    // Short action name.
    name: &'static str,
    // Carries `T` without storing a value of it.
    _marker: PhantomData<T>,
}
impl<T: ActionType> ActionPathTrait for ActionPath<T> {
    type PathType = T;

    /// Returns the full path as a borrowed `Cow` (no allocation).
    fn path(&self) -> Cow<'_, str> {
        Cow::Borrowed(self.path)
    }

    /// Returns the action name as a borrowed `Cow` (no allocation).
    fn name(&self) -> Cow<'_, str> {
        Cow::Borrowed(self.name)
    }
}
/// Declares a tree of XR action paths.
///
/// Nodes with children become nested `pub mod`s; leaves that declare `name`
/// and `path_type` become `pub const` [`ActionPath`]s whose `path` is the
/// compile-time concatenation of every ancestor `path` segment.
macro_rules! actions {
    // Leaf with only a `path` and no name/type/children: nothing to emit.
    (
        $($subpath:literal),*
        $id:ident {
            path: $path:literal;
        }
    ) => {};
    // Leaf action: emit a `pub const` `ActionPath` named after `$id`
    // (SCREAMING_SNAKE_CASE via `paste`).
    (
        $($subpath:literal),*
        $id:ident {
            path: $path:literal;
            name: $name:literal;
            path_type: $path_type:ty;
        }
    ) => {
        paste::paste! {
            pub const [<$id:snake:upper>]: crate::actions::ActionPath<$path_type> = crate::actions::ActionPath {
                path: concat!($($subpath,)* $path),
                name: $name,
                _marker: std::marker::PhantomData,
            };
        }
    };
    // Node that is both a leaf action AND has children: emit the leaf const,
    // then recurse into the children.
    //
    // FIX: these recursive invocations previously used `crate::path::actions!`,
    // but the macro is re-exported from `crate::actions` (see
    // `pub(crate) use actions;` below) and no `crate::path` module exists, so
    // this arm failed to resolve whenever it matched.
    (
        $($subpath:literal),*
        $id:ident {
            path: $path:literal;
            name: $name:literal;
            path_type: $path_type:ty;
            $($children:tt)*
        }
    ) => {
        crate::actions::actions! {
            $($subpath),*
            $id {
                path: $path;
                name: $name;
                path_type: $path_type;
            }
        }
        crate::actions::actions! {
            $($subpath),*
            $id {
                path: $path;
                $($children)*
            }
        }
    };
    // Node with children: create a module named `$id` and expand the children
    // inside it, appending this node's `path` to the accumulated prefix.
    (
        $($subpath:literal),*
        $id:ident {
            path: $path:literal;
            $($children:tt)*
        }
    ) => {
        pub mod $id {
            crate::actions::actions! {
                $($subpath,)* $path
                $($children)*
            }
        }
    };
    // Sibling splitting: expand the first node, then the remaining siblings
    // under the same prefix.
    (
        $($subpath:literal),*
        $id:ident {
            path: $path:literal;
            $($attrs:tt)*
        }
        $($siblings:tt)*
    ) => {
        crate::actions::actions! {
            $($subpath),*
            $id {
                path: $path;
                $($attrs)*
            }
        }
        crate::actions::actions! {
            $($subpath),*
            $($siblings)*
        }
    };
}
// Re-export so submodules can invoke the macro as `crate::actions::actions!`.
pub(crate) use actions;

238
src/actions/oculus_touch.rs Normal file
View File

@@ -0,0 +1,238 @@
// Action-path tree for the Oculus Touch controller profile.
//
// Expands (via the `actions!` macro in `crate::actions`) into nested modules
// mirroring the path hierarchy, with a `pub const` `ActionPath` for each leaf
// declaring `name`/`path_type`. A leaf's full path is the concatenation of
// its ancestors' segments, e.g. `/user/hand/left/input/x/click`.
super::actions! {
    "/user"
    hand {
        path: "/hand";
        // ---- Left controller: X/Y buttons, menu, squeeze, trigger,
        //      thumbstick, thumbrest, grip/aim poses, haptic output. ----
        left {
            path: "/left";
            input {
                path: "/input";
                x {
                    path: "/x";
                    click {
                        path: "/click";
                        name: "x_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "x_touch";
                        path_type: bool;
                    }
                }
                y {
                    path: "/y";
                    click {
                        path: "/click";
                        name: "y_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "y_touch";
                        path_type: bool;
                    }
                }
                menu {
                    path: "/menu";
                    click {
                        path: "/click";
                        name: "menu_click";
                        path_type: bool;
                    }
                }
                squeeze {
                    path: "/squeeze";
                    value {
                        path: "/value";
                        name: "left_grip_val";
                        path_type: f32;
                    }
                }
                trigger {
                    path: "/trigger";
                    value {
                        path: "/value";
                        name: "left_trigger_val";
                        path_type: f32;
                    }
                    touch {
                        path: "/touch";
                        name: "left_trigger_touch";
                        path_type: bool;
                    }
                }
                thumbstick {
                    path: "/thumbstick";
                    x {
                        path: "/x";
                        name: "left_thumbstick_x";
                        path_type: f32;
                    }
                    y {
                        path: "/y";
                        name: "left_thumbstick_y";
                        path_type: f32;
                    }
                    click {
                        path: "/click";
                        name: "left_thumbstick_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "left_thumbstick_touch";
                        path_type: bool;
                    }
                }
                thumbrest {
                    path: "/thumbrest";
                    touch {
                        path: "/touch";
                        name: "left_thumbrest_touch";
                        path_type: bool;
                    }
                }
                grip {
                    path: "/grip";
                    pose {
                        path: "/pose";
                        name: "left_grip_pose";
                        path_type: crate::types::Pose;
                    }
                }
                aim {
                    path: "/aim";
                    pose {
                        path: "/pose";
                        name: "left_aim_pose";
                        path_type: crate::types::Pose;
                    }
                }
            }
            output {
                path: "/output";
                haptic {
                    path: "/haptic";
                    name: "left_controller_haptic";
                    path_type: crate::types::Haptic;
                }
            }
        }
        // ---- Right controller: mirrors the left hand, with A/B buttons and
        //      a system button instead of X/Y and menu. ----
        right {
            path: "/right";
            input {
                path: "/input";
                a {
                    path: "/a";
                    click {
                        path: "/click";
                        name: "a_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "a_touch";
                        path_type: bool;
                    }
                }
                b {
                    path: "/b";
                    click {
                        path: "/click";
                        name: "b_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "b_touch";
                        path_type: bool;
                    }
                }
                system {
                    path: "/system";
                    click {
                        path: "/click";
                        name: "system_click";
                        path_type: bool;
                    }
                }
                squeeze {
                    path: "/squeeze";
                    value {
                        path: "/value";
                        name: "right_grip_val";
                        path_type: f32;
                    }
                }
                trigger {
                    path: "/trigger";
                    value {
                        path: "/value";
                        name: "right_trigger_val";
                        path_type: f32;
                    }
                    touch {
                        path: "/touch";
                        name: "right_trigger_touch";
                        path_type: bool;
                    }
                }
                thumbstick {
                    path: "/thumbstick";
                    x {
                        path: "/x";
                        name: "right_thumbstick_x";
                        path_type: f32;
                    }
                    y {
                        path: "/y";
                        name: "right_thumbstick_y";
                        path_type: f32;
                    }
                    click {
                        path: "/click";
                        name: "right_thumbstick_click";
                        path_type: bool;
                    }
                    touch {
                        path: "/touch";
                        name: "right_thumbstick_touch";
                        path_type: bool;
                    }
                }
                thumbrest {
                    path: "/thumbrest";
                    touch {
                        path: "/touch";
                        name: "right_thumbrest_touch";
                        path_type: bool;
                    }
                }
                grip {
                    path: "/grip";
                    pose {
                        path: "/pose";
                        name: "right_grip_pose";
                        path_type: crate::types::Pose;
                    }
                }
                aim {
                    path: "/aim";
                    pose {
                        path: "/pose";
                        name: "right_aim_pose";
                        path_type: crate::types::Pose;
                    }
                }
            }
            output {
                path: "/output";
                haptic {
                    path: "/haptic";
                    name: "right_controller_haptic";
                    path_type: crate::types::Haptic;
                }
            }
        }
    }
}

View File

@@ -1,366 +1,4 @@
use std::sync::Arc;
use bevy::{
app::PluginGroupBuilder,
core_pipeline::tonemapping::{DebandDither, Tonemapping},
math::Vec3A,
prelude::*,
render::{
camera::{
CameraProjection, CameraProjectionPlugin, CameraRenderGraph, ManualTextureView,
ManualTextureViewHandle, ManualTextureViews, RenderTarget,
},
pipelined_rendering::PipelinedRenderingPlugin,
primitives::Frustum,
renderer::{render_system, RenderAdapter, RenderAdapterInfo, RenderInstance, RenderQueue},
view::{ColorGrading, VisibleEntities},
Render, RenderApp, RenderPlugin,
},
window::PresentMode,
};
use xr_api::prelude::*;
// Stable handles for the manual texture views the left/right eye cameras
// render into; the numeric values are arbitrary and only need to be unique.
pub const LEFT_XR_TEXTURE_HANDLE: ManualTextureViewHandle = ManualTextureViewHandle(1208214591);
pub const RIGHT_XR_TEXTURE_HANDLE: ManualTextureViewHandle = ManualTextureViewHandle(3383858418);
/// Core XR plugin: creates the XR session, hands its render resources to
/// bevy's `RenderPlugin`, and schedules the per-frame begin/locate/end systems.
pub struct XrPlugin;
impl Plugin for XrPlugin {
    fn build(&self, app: &mut App) {
        // Create the XR instance and session up front so the renderer can be
        // initialized from the session's wgpu resources below.
        let instance = Entry::new()
            .create_instance(ExtensionSet { vulkan: true })
            .unwrap();
        let session = instance
            .create_session(SessionCreateInfo {
                texture_format: wgpu::TextureFormat::Rgba8UnormSrgb,
            })
            .unwrap();
        let (device, queue, adapter_info, adapter, instance) =
            session.get_render_resources().unwrap();
        // Bind the Oculus Touch profile and pre-create the two actions used
        // by the systems in this file.
        let input = session.create_input(Bindings::OculusTouch).unwrap();
        let left_primary_button = input
            .create_action(input::hand_left::PrimaryButton::CLICK)
            .unwrap();
        let left_hand_pose = input.create_action(input::hand_left::Grip::POSE).unwrap();
        // NonSend resources: these XR handles stay on the main thread.
        app.insert_non_send_resource(left_primary_button);
        app.insert_non_send_resource(left_hand_pose);
        app.insert_non_send_resource(session.clone());
        app.add_plugins((
            // Manual render creation: reuse the session's device/queue/adapter
            // instead of letting bevy create its own.
            RenderPlugin {
                render_creation: bevy::render::settings::RenderCreation::Manual(
                    device.into(),
                    RenderQueue(Arc::new(queue)),
                    RenderAdapterInfo(adapter_info),
                    RenderAdapter(Arc::new(adapter)),
                    RenderInstance(Arc::new(instance)),
                ),
            },
            // Register the custom per-eye projection type.
            CameraProjectionPlugin::<XRProjection>::default(),
        ));
        app.add_systems(PreUpdate, begin_frame);
        app.add_systems(Last, locate_views);
        app.add_systems(Startup, setup);
        // The render app holds its own session handle and ends the XR frame
        // only after bevy's render system has run.
        let render_app = app.sub_app_mut(RenderApp);
        render_app.insert_non_send_resource(session);
        render_app.add_systems(Render, end_frame.after(render_system));
    }
}
/// Camera bundle for one XR eye: mirrors `Camera3dBundle`, but renders into a
/// manual texture view and tags the entity with its [`XrCameraType`].
#[derive(Bundle)]
pub struct XrCameraBundle {
    pub camera: Camera,
    pub camera_render_graph: CameraRenderGraph,
    // NOTE(review): named `xr_projection` but typed as bevy's
    // `PerspectiveProjection`, while `CameraProjectionPlugin::<XRProjection>`
    // is registered in `XrPlugin::build` — confirm which projection is meant.
    pub xr_projection: PerspectiveProjection,
    pub visible_entities: VisibleEntities,
    pub frustum: Frustum,
    pub transform: Transform,
    pub global_transform: GlobalTransform,
    pub camera_3d: Camera3d,
    pub tonemapping: Tonemapping,
    pub dither: DebandDither,
    pub color_grading: ColorGrading,
    // Which eye this camera renders (or Flatscreen).
    pub xr_camera_type: XrCameraType,
}
/// Tags a camera entity as rendering a specific XR eye or the flat screen.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd, Component)]
pub enum XrCameraType {
    Xr(Eye),
    Flatscreen,
}
/// Left or right eye, with explicit discriminants 0 and 1.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub enum Eye {
    Left = 0,
    Right = 1,
}
impl XrCameraBundle {
    /// Builds a camera bundle that renders into the manual texture view
    /// belonging to `eye`.
    pub fn new(eye: Eye) -> Self {
        // Pick the texture-view handle for this eye.
        let handle = match eye {
            Eye::Left => LEFT_XR_TEXTURE_HANDLE,
            Eye::Right => RIGHT_XR_TEXTURE_HANDLE,
        };
        // Render before the main pass ordering (order -1), targeting the
        // eye's manual texture view with no explicit viewport.
        let camera = Camera {
            order: -1,
            target: RenderTarget::TextureView(handle),
            viewport: None,
            ..default()
        };
        Self {
            camera,
            camera_render_graph: CameraRenderGraph::new(bevy::core_pipeline::core_3d::graph::NAME),
            xr_projection: Default::default(),
            visible_entities: Default::default(),
            frustum: Default::default(),
            transform: Default::default(),
            global_transform: Default::default(),
            camera_3d: Default::default(),
            tonemapping: Default::default(),
            dither: DebandDither::Enabled,
            color_grading: Default::default(),
            xr_camera_type: XrCameraType::Xr(eye),
        }
    }
}
/// Asymmetric per-eye projection driven by the runtime-supplied field-of-view
/// angles, rather than a single symmetric vertical FOV.
#[derive(Debug, Clone, Component, Reflect)]
#[reflect(Component, Default)]
pub struct XRProjection {
    // Near clip plane distance.
    pub near: f32,
    // Far clip plane distance. Note: `get_projection_matrix` currently forces
    // an infinite-reverse projection, so this is only reported by `far()`.
    pub far: f32,
    // Per-edge view angles from the runtime; excluded from reflection.
    #[reflect(ignore)]
    pub fov: Fov,
}
impl Default for XRProjection {
fn default() -> Self {
Self {
near: 0.1,
far: 1000.,
fov: Default::default(),
}
}
}
impl CameraProjection for XRProjection {
    // =============================================================================
    // math code adapted from
    // https://github.com/KhronosGroup/OpenXR-SDK-Source/blob/master/src/common/xr_linear.h
    // Copyright (c) 2017 The Khronos Group Inc.
    // Copyright (c) 2016 Oculus VR, LLC.
    // SPDX-License-Identifier: Apache-2.0
    // =============================================================================
    /// Builds an asymmetric projection matrix from the four per-edge FOV
    /// angles. Because `far_z` is hard-coded to -1 below, the infinite
    /// (reverse-Z) branch is always taken; the finite branch is kept for
    /// reference but is currently unreachable.
    fn get_projection_matrix(&self) -> Mat4 {
        // symmetric perspective for debugging
        // let x_fov = (self.fov.angle_left.abs() + self.fov.angle_right.abs());
        // let y_fov = (self.fov.angle_up.abs() + self.fov.angle_down.abs());
        // return Mat4::perspective_infinite_reverse_rh(y_fov, x_fov / y_fov, self.near);
        let fov = self.fov;
        let is_vulkan_api = false; // FIXME wgpu probably abstracts this
        let near_z = self.near;
        let far_z = -1.; // use infinite proj
        // let far_z = self.far;
        // Tangents of the per-edge half-angles; signs encode the asymmetry.
        let tan_angle_left = fov.angle_left.tan();
        let tan_angle_right = fov.angle_right.tan();
        let tan_angle_down = fov.angle_down.tan();
        let tan_angle_up = fov.angle_up.tan();
        let tan_angle_width = tan_angle_right - tan_angle_left;
        // Set to tanAngleDown - tanAngleUp for a clip space with positive Y
        // down (Vulkan). Set to tanAngleUp - tanAngleDown for a clip space with
        // positive Y up (OpenGL / D3D / Metal).
        // const float tanAngleHeight =
        //     graphicsApi == GRAPHICS_VULKAN ? (tanAngleDown - tanAngleUp) : (tanAngleUp - tanAngleDown);
        let tan_angle_height = if is_vulkan_api {
            tan_angle_down - tan_angle_up
        } else {
            tan_angle_up - tan_angle_down
        };
        // Set to nearZ for a [-1,1] Z clip space (OpenGL / OpenGL ES).
        // Set to zero for a [0,1] Z clip space (Vulkan / D3D / Metal).
        // const float offsetZ =
        //     (graphicsApi == GRAPHICS_OPENGL || graphicsApi == GRAPHICS_OPENGL_ES) ? nearZ : 0;
        // FIXME handle enum of graphics apis
        let offset_z = 0.;
        // Column-major 4x4; indices follow xr_linear.h's layout
        // (cols[col * 4 + row] flattened as written below).
        let mut cols: [f32; 16] = [0.0; 16];
        if far_z <= near_z {
            // place the far plane at infinity
            cols[0] = 2. / tan_angle_width;
            cols[4] = 0.;
            cols[8] = (tan_angle_right + tan_angle_left) / tan_angle_width;
            cols[12] = 0.;
            cols[1] = 0.;
            cols[5] = 2. / tan_angle_height;
            cols[9] = (tan_angle_up + tan_angle_down) / tan_angle_height;
            cols[13] = 0.;
            cols[2] = 0.;
            cols[6] = 0.;
            cols[10] = -1.;
            cols[14] = -(near_z + offset_z);
            cols[3] = 0.;
            cols[7] = 0.;
            cols[11] = -1.;
            cols[15] = 0.;
            // bevy uses the _reverse_ infinite projection
            // https://dev.theomader.com/depth-precision/
            let z_reversal = Mat4::from_cols_array_2d(&[
                [1f32, 0., 0., 0.],
                [0., 1., 0., 0.],
                [0., 0., -1., 0.],
                [0., 0., 1., 1.],
            ]);
            return z_reversal * Mat4::from_cols_array(&cols);
        } else {
            // normal projection (currently unreachable; see far_z above)
            cols[0] = 2. / tan_angle_width;
            cols[4] = 0.;
            cols[8] = (tan_angle_right + tan_angle_left) / tan_angle_width;
            cols[12] = 0.;
            cols[1] = 0.;
            cols[5] = 2. / tan_angle_height;
            cols[9] = (tan_angle_up + tan_angle_down) / tan_angle_height;
            cols[13] = 0.;
            cols[2] = 0.;
            cols[6] = 0.;
            cols[10] = -(far_z + offset_z) / (far_z - near_z);
            cols[14] = -(far_z * (near_z + offset_z)) / (far_z - near_z);
            cols[3] = 0.;
            cols[7] = 0.;
            cols[11] = -1.;
            cols[15] = 0.;
        }
        Mat4::from_cols_array(&cols)
    }
    // FOV comes from the runtime each frame, not from the window size, so
    // resize notifications are ignored.
    fn update(&mut self, _width: f32, _height: f32) {}
    fn far(&self) -> f32 {
        self.far
    }
    /// Frustum corners at `z_near`/`z_far` derived from the per-edge angle
    /// tangents.
    fn get_frustum_corners(&self, z_near: f32, z_far: f32) -> [Vec3A; 8] {
        let tan_angle_left = self.fov.angle_left.tan();
        let tan_angle_right = self.fov.angle_right.tan();
        let tan_angle_bottom = self.fov.angle_down.tan();
        let tan_angle_top = self.fov.angle_up.tan();
        // NOTE: These vertices are in the specific order required by [`calculate_cascade`].
        [
            Vec3A::new(tan_angle_right, tan_angle_bottom, 1.0) * z_near, // bottom right
            Vec3A::new(tan_angle_right, tan_angle_top, 1.0) * z_near,    // top right
            Vec3A::new(tan_angle_left, tan_angle_top, 1.0) * z_near,     // top left
            Vec3A::new(tan_angle_left, tan_angle_bottom, 1.0) * z_near,  // bottom left
            Vec3A::new(tan_angle_right, tan_angle_bottom, 1.0) * z_far,  // bottom right
            Vec3A::new(tan_angle_right, tan_angle_top, 1.0) * z_far,     // top right
            Vec3A::new(tan_angle_left, tan_angle_top, 1.0) * z_far,      // top left
            Vec3A::new(tan_angle_left, tan_angle_bottom, 1.0) * z_far,   // bottom left
        ]
    }
}
/// Entities of the (left, right) eye cameras spawned at startup.
#[derive(Resource)]
struct Cameras(Entity, Entity);

/// Spawns one camera per eye and records their entities in [`Cameras`].
fn setup(mut commands: Commands) {
    let left_eye = commands.spawn(XrCameraBundle::new(Eye::Left)).id();
    let right_eye = commands.spawn(XrCameraBundle::new(Eye::Right)).id();
    commands.insert_resource(Cameras(left_eye, right_eye));
}
/// Begins the XR frame each `PreUpdate`.
///
/// The pose action parameter is never read; it is underscored (fixing the
/// unused-variable warning) but kept so the system still requires the action
/// resource to exist and, being `NonSend`, runs on the main thread.
pub fn begin_frame(session: NonSend<Session>, _action: NonSend<Action<Pose>>) {
    session.begin_frame().unwrap();
}
fn locate_views(
session: NonSend<Session>,
mut manual_texture_views: ResMut<ManualTextureViews>,
cameras: Res<Cameras>,
mut transforms: Query<(&mut Transform)>,
) {
let (left_view, right_view) = session.locate_views().unwrap();
let left = ManualTextureView {
texture_view: left_view.texture_view().unwrap().into(),
size: left_view.resolution(),
format: left_view.format(),
};
let right = ManualTextureView {
texture_view: right_view.texture_view().unwrap().into(),
size: right_view.resolution(),
format: right_view.format(),
};
if let Ok(mut transform) = transforms.get_mut(cameras.0) {
let Pose {
translation,
rotation,
} = left_view.pose();
transform.translation = translation;
transform.rotation = rotation;
}
if let Ok(mut transform) = transforms.get_mut(cameras.1) {
let Pose {
translation,
rotation,
} = right_view.pose();
transform.translation = translation;
transform.rotation = rotation;
}
manual_texture_views.insert(RIGHT_XR_TEXTURE_HANDLE, right);
manual_texture_views.insert(LEFT_XR_TEXTURE_HANDLE, left);
}
/// Ends the XR frame; runs in the render app after `render_system`
/// (see `XrPlugin::build`).
pub fn end_frame(session: NonSend<Session>) {
    session.end_frame().unwrap();
}
/// `DefaultPlugins` with stock rendering replaced by [`XrPlugin`] and
/// pipelined rendering disabled.
pub struct DefaultXrPlugins;
impl PluginGroup for DefaultXrPlugins {
    fn build(self) -> PluginGroupBuilder {
        DefaultPlugins
            .build()
            // XrPlugin adds its own RenderPlugin built from the XR session's
            // resources, so the default one must not run.
            .disable::<RenderPlugin>()
            .disable::<PipelinedRenderingPlugin>()
            .add_before::<RenderPlugin, _>(XrPlugin)
            .set(WindowPlugin {
                // Desktop: transparent primary window with vsync disabled.
                #[cfg(not(target_os = "android"))]
                primary_window: Some(Window {
                    transparent: true,
                    present_mode: PresentMode::AutoNoVsync,
                    ..default()
                }),
                // Android: no regular window; keep the app running with no
                // windows open.
                #[cfg(target_os = "android")]
                primary_window: None,
                #[cfg(target_os = "android")]
                exit_condition: bevy::window::ExitCondition::DontExit,
                #[cfg(target_os = "android")]
                close_when_requested: true,
                ..default()
            })
    }
}
// Declarative action-path API and shared value types.
pub mod actions;
pub mod types;
// WebXR backend, compiled only on wasm targets.
#[cfg(target_family = "wasm")]
pub mod webxr;

View File

@@ -1,50 +0,0 @@
//! A simple 3D scene with light shining over a cube sitting on a plane.
use bevy::{
core_pipeline::clear_color::ClearColorConfig, prelude::*, render::camera::RenderTarget,
};
use bevy_oxr::{DefaultXrPlugins, LEFT_XR_TEXTURE_HANDLE};
/// Entry point: build the app with the XR plugin group, register the scene
/// setup system, and run.
fn main() {
    let mut app = App::new();
    app.add_plugins(DefaultXrPlugins);
    app.add_systems(Startup, setup);
    app.run();
}
/// set up a simple 3D scene
fn setup(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
) {
// circular base
commands.spawn(PbrBundle {
mesh: meshes.add(shape::Circle::new(4.0).into()),
material: materials.add(Color::WHITE.into()),
transform: Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
..default()
});
// cube
commands.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
material: materials.add(Color::rgb_u8(124, 144, 255).into()),
transform: Transform::from_xyz(0.0, 0.5, 0.0),
..default()
});
// light
commands.spawn(PointLightBundle {
point_light: PointLight {
intensity: 1500.0,
shadows_enabled: true,
..default()
},
transform: Transform::from_xyz(4.0, 8.0, 4.0),
..default()
});
// camera
commands.spawn(Camera3dBundle {
transform: Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y),
..default()
});
}

8
src/types.rs Normal file
View File

@@ -0,0 +1,8 @@
use bevy::math::{Quat, Vec3};
/// A rigid transform reported by the XR runtime: a position plus an
/// orientation. Consumers move/destructure it by value (e.g. `locate_views`),
/// so the common value-type derives are provided.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct Pose {
    /// Position component.
    pub translation: Vec3,
    /// Orientation component.
    pub rotation: Quat,
}
/// Marker type for haptic-output action paths; carries no data yet.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Haptic;

175
src/webxr.rs Normal file
View File

@@ -0,0 +1,175 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Mutex;
use std::time::Duration;
use bevy::app::{App, Plugin, PluginsState};
use bevy::ecs::entity::Entity;
use bevy::ecs::query::With;
use bevy::ecs::world::World;
use bevy::log::{error, info};
use bevy::render::RenderApp;
use bevy::window::PrimaryWindow;
use bevy::winit::WinitWindows;
use js_sys::Object;
use wasm_bindgen::closure::Closure;
use wasm_bindgen::{JsCast, JsValue};
use wasm_bindgen_futures::JsFuture;
use web_sys::{
HtmlCanvasElement, WebGl2RenderingContext, XrFrame, XrReferenceSpace, XrReferenceSpaceType,
XrRenderStateInit, XrSession, XrSessionMode, XrWebGlLayer,
};
use winit::platform::web::WindowExtWebSys;
/// Shared slot the async WebXR init task fills with the created session and
/// reference space (or the JS error). Cloned between the plugin and the
/// spawned task; `Rc<Mutex<..>>` because everything stays on the main thread.
#[derive(Clone)]
struct FutureXrSession(Rc<Mutex<Option<Result<(XrSession, XrReferenceSpace), JsValue>>>>);
/// Plugin that asynchronously requests a WebXR session and swaps bevy's
/// runner for a `requestAnimationFrame`-driven one.
pub struct XrInitPlugin;
impl Plugin for XrInitPlugin {
    fn build(&self, app: &mut App) {
        // Grab the canvas now, while the winit window is available.
        let canvas = get_canvas(&mut app.world).unwrap();
        let future_session = FutureXrSession(Default::default());
        app.set_runner(webxr_runner);
        app.insert_non_send_resource(future_session.clone());
        // Kick off the async WebXR request; the result lands in the shared
        // slot, which `ready` polls each runner tick.
        bevy::tasks::IoTaskPool::get().spawn_local(async move {
            let result = init_webxr(
                canvas,
                XrSessionMode::ImmersiveVr,
                XrReferenceSpaceType::Local,
            )
            .await;
            *future_session.0.lock().unwrap() = Some(result);
        });
    }
    // Ready once the async init has filled the slot.
    // NOTE(review): if the resource is missing or `try_lock` fails, this
    // reports ready (`unwrap_or(true)`) even though no session exists, and
    // `finish` would then do nothing — confirm that fallback is intended.
    fn ready(&self, app: &App) -> bool {
        app.world
            .get_non_send_resource::<FutureXrSession>()
            .and_then(|fxr| fxr.0.try_lock().map(|locked| locked.is_some()).ok())
            .unwrap_or(true)
    }
    // Move the session + reference space out of the slot and register them in
    // both the main world and the render app.
    // NOTE(review): an `Err` from init is silently discarded here.
    fn finish(&self, app: &mut App) {
        info!("finishing");
        if let Some(Ok((session, reference_space))) = app
            .world
            .remove_non_send_resource::<FutureXrSession>()
            .and_then(|fxr| fxr.0.lock().unwrap().take())
        {
            app.insert_non_send_resource(session.clone())
                .insert_non_send_resource(reference_space.clone());
            app.sub_app_mut(RenderApp)
                .insert_non_send_resource(session)
                .insert_non_send_resource(reference_space);
        }
    }
}
/// Custom app runner for the browser: the main thread cannot block, so this
/// polls `plugins_state()` on a 1 ms `setTimeout` loop until all plugins are
/// ready, then finishes/cleans up the app and hands it to `run_xr_app`.
fn webxr_runner(mut app: App) {
    // Schedules `f` to run once after `dur` on the browser event loop.
    fn set_timeout(f: &Closure<dyn FnMut()>, dur: Duration) {
        web_sys::window()
            .unwrap()
            .set_timeout_with_callback_and_timeout_and_arguments_0(
                f.as_ref().unchecked_ref(),
                dur.as_millis() as i32,
            )
            .expect("Should register `setTimeout`.");
    }
    // Self-referential Rc<RefCell<Closure>>: the closure needs a handle to
    // itself so it can re-schedule, and the Rc keeps it alive across ticks.
    let run_xr_inner = Rc::new(RefCell::new(None));
    let run_xr: Rc<RefCell<Option<Closure<dyn FnMut()>>>> = run_xr_inner.clone();
    *run_xr.borrow_mut() = Some(Closure::new(move || {
        let app = &mut app;
        if app.plugins_state() == PluginsState::Ready {
            app.finish();
            app.cleanup();
            // Hand ownership of the App to the rAF-driven loop.
            run_xr_app(std::mem::take(app));
        } else {
            // Not ready yet: try again on the next timer tick.
            set_timeout(
                run_xr_inner.borrow().as_ref().unwrap(),
                Duration::from_millis(1),
            );
        }
    }));
    set_timeout(run_xr.borrow().as_ref().unwrap(), Duration::from_millis(1));
}
/// Drives `app.update()` from the XR session's `requestAnimationFrame` loop,
/// inserting the current `XrFrame` as a NonSend resource before each update.
fn run_xr_app(mut app: App) {
    let session = app.world.non_send_resource::<XrSession>().clone();
    // Same self-referential closure pattern as `webxr_runner`, but keyed to
    // the session's rAF instead of `setTimeout`.
    let inner_closure: Rc<RefCell<Option<Closure<dyn FnMut(f64, XrFrame)>>>> =
        Rc::new(RefCell::new(None));
    let closure = inner_closure.clone();
    *closure.borrow_mut() = Some(Closure::new(move |_time, frame: XrFrame| {
        let session = frame.session();
        // Expose this frame to systems for the duration of the update.
        app.insert_non_send_resource(frame);
        info!("update");
        app.update();
        // Re-arm for the next XR animation frame.
        session.request_animation_frame(
            inner_closure
                .borrow()
                .as_ref()
                .unwrap()
                .as_ref()
                .unchecked_ref(),
        );
    }));
    session.request_animation_frame(closure.borrow().as_ref().unwrap().as_ref().unchecked_ref());
}
/// Returns the HTML canvas element backing the primary window, if both the
/// window entity and the winit window registry exist.
fn get_canvas(world: &mut World) -> Option<HtmlCanvasElement> {
    let entity = world
        .query_filtered::<Entity, With<PrimaryWindow>>()
        .get_single(world)
        .ok()?;
    let winit_windows = world.get_non_send_resource::<WinitWindows>()?;
    let window = winit_windows.get_window(entity)?;
    Some(window.canvas())
}
/// Requests a WebXR session of the given `mode` plus a reference space.
///
/// Steps: feature-check the mode, request the session, create an
/// XR-compatible WebGL2 context on `canvas`, attach it to the session as the
/// base render layer, then request the reference space.
async fn init_webxr(
    canvas: HtmlCanvasElement,
    mode: XrSessionMode,
    reference_type: XrReferenceSpaceType,
) -> Result<(XrSession, XrReferenceSpace), JsValue> {
    let xr = web_sys::window().unwrap().navigator().xr();
    let supports_session = JsFuture::from(xr.is_session_supported(mode)).await?;
    // `isSessionSupported` resolves to a boolean; treat anything other than
    // `true` (including a non-boolean resolve value) as unsupported. The
    // previous `supports_session == false` check silently proceeded when the
    // value was not a JS boolean.
    if !supports_session.as_bool().unwrap_or(false) {
        error!("XR session {:?} not supported", mode);
        return Err(JsValue::from_str(&format!(
            "XR session {:?} not supported",
            mode
        )));
    }
    info!("creating session");
    let session: XrSession = JsFuture::from(xr.request_session(mode)).await?.into();
    info!("creating gl");
    let gl: WebGl2RenderingContext = {
        // The context must be created with `xrCompatible: true` so it can be
        // bound to the session's WebGL layer below.
        let gl_attribs = Object::new();
        js_sys::Reflect::set(
            &gl_attribs,
            &JsValue::from_str("xrCompatible"),
            &JsValue::TRUE,
        )?;
        canvas
            .get_context_with_context_options("webgl2", &gl_attribs)?
            // `ok_or_else`: build the error JsValue only on failure.
            .ok_or_else(|| {
                JsValue::from_str("Unable to create WebGL rendering context")
            })?
            .dyn_into()?
    };
    let xr_gl_layer = XrWebGlLayer::new_with_web_gl2_rendering_context(&session, &gl)?;
    let mut render_state_init = XrRenderStateInit::new();
    render_state_init.base_layer(Some(&xr_gl_layer));
    session.update_render_state_with_state(&render_state_init);
    info!("creating ref space");
    let reference_space = JsFuture::from(session.request_reference_space(reference_type))
        .await?
        .into();
    info!("finished");
    Ok((session, reference_space))
}