new renderer resource init

This commit is contained in:
awtterpip
2023-09-08 18:16:57 -05:00
parent 535e64d751
commit 6df51bb70f
3 changed files with 465 additions and 199 deletions

View File

@@ -16,6 +16,7 @@ wgpu-hal = "0.16.0"
[dev-dependencies] [dev-dependencies]
bevy = { git = "https://github.com/awtterpip/bevy" } bevy = { git = "https://github.com/awtterpip/bevy" }
color-eyre = "0.6.2"
[[example]] [[example]]
name = "xr" name = "xr"

View File

@@ -1,16 +1,217 @@
//! A simple 3D scene with light shining over a cube sitting on a plane. use bevy::core_pipeline::core_3d;
use bevy_openxr::{DefaultXrPlugins, LEFT_XR_TEXTURE_HANDLE, RIGHT_XR_TEXTURE_HANDLE}; use bevy::core_pipeline::tonemapping::{DebandDither, Tonemapping};
use bevy::ecs::prelude::{Bundle, Component, ReflectComponent};
use bevy::math::Mat4;
use bevy::prelude::Camera3d;
use bevy::reflect::{std_traits::ReflectDefault, Reflect};
use bevy::render::view::ColorGrading;
use bevy::render::{
camera::{Camera, CameraProjection, CameraRenderGraph},
primitives::Frustum,
view::VisibleEntities,
};
use bevy::transform::components::{GlobalTransform, Transform};
// mostly copied from https://github.com/blaind/bevy_openxr/tree/main/crates/bevy_openxr/src/render_graph/camera
use openxr::Fovf;
/// Bundle of components needed to drive one eye's camera in an XR session.
///
/// Mirrors Bevy's `Camera3dBundle`, but swaps the standard projection
/// component for [`XRProjection`] so the per-eye asymmetric FOV reported by
/// the OpenXR runtime can be applied directly.
#[derive(Bundle)]
pub struct XrCameraBundle {
pub camera: Camera,
pub camera_render_graph: CameraRenderGraph,
// Asymmetric-frustum projection driven by the runtime's `Fovf`.
pub xr_projection: XRProjection,
pub visible_entities: VisibleEntities,
pub frustum: Frustum,
pub transform: Transform,
pub global_transform: GlobalTransform,
pub camera_3d: Camera3d,
pub tonemapping: Tonemapping,
pub dither: DebandDither,
pub color_grading: ColorGrading,
}
// NOTE: ideally Perspective and Orthographic defaults can share the same impl, but sadly it breaks rust's type inference
impl Default for XrCameraBundle {
fn default() -> Self {
Self {
camera_render_graph: CameraRenderGraph::new(core_3d::graph::NAME),
camera: Default::default(),
xr_projection: Default::default(),
visible_entities: Default::default(),
frustum: Default::default(),
transform: Default::default(),
global_transform: Default::default(),
camera_3d: Default::default(),
tonemapping: Default::default(),
dither: DebandDither::Enabled,
color_grading: ColorGrading::default(),
}
}
}
/// Camera projection component fed by the OpenXR runtime.
///
/// Unlike Bevy's symmetric `PerspectiveProjection`, this stores the raw
/// per-eye [`Fovf`] (four half-angles: left/right/up/down), allowing the
/// asymmetric frustum typical of HMD displays.
#[derive(Debug, Clone, Component, Reflect)]
#[reflect(Component, Default)]
pub struct XRProjection {
// Near clip plane distance.
pub near: f32,
// Far clip plane distance; note `get_projection_matrix` currently uses an
// infinite far plane regardless of this value.
pub far: f32,
// `Fovf` is a foreign type that does not implement `Reflect`, so skip it.
#[reflect(ignore)]
pub fov: Fovf,
}
impl Default for XRProjection {
fn default() -> Self {
Self {
near: 0.1,
far: 1000.,
fov: Default::default(),
}
}
}
impl XRProjection {
pub fn new(near: f32, far: f32, fov: Fovf) -> Self {
XRProjection { near, far, fov }
}
}
impl CameraProjection for XRProjection {
    // =============================================================================
    // math code adapted from
    // https://github.com/KhronosGroup/OpenXR-SDK-Source/blob/master/src/common/xr_linear.h
    // Copyright (c) 2017 The Khronos Group Inc.
    // Copyright (c) 2016 Oculus VR, LLC.
    // SPDX-License-Identifier: Apache-2.0
    // =============================================================================
    /// Builds an asymmetric perspective matrix from the four OpenXR FOV
    /// half-angles. Because `far_z` is hard-coded to `-1.` below, the
    /// infinite-far-plane branch is always taken; the finite-far branch is
    /// dead code kept for reference / future use.
    fn get_projection_matrix(&self) -> Mat4 {
        // symmetric perspective for debugging
        // let x_fov = (self.fov.angle_left.abs() + self.fov.angle_right.abs());
        // let y_fov = (self.fov.angle_up.abs() + self.fov.angle_down.abs());
        // return Mat4::perspective_infinite_reverse_rh(y_fov, x_fov / y_fov, self.near);
        let fov = self.fov;
        let is_vulkan_api = false; // FIXME wgpu probably abstracts this
        let near_z = self.near;
        let far_z = -1.; // use infinite proj
        // let far_z = self.far;
        // Half-angle tangents give the view-volume extents at unit distance;
        // left/down angles are typically negative, so width/height below are
        // effectively sums of magnitudes.
        let tan_angle_left = fov.angle_left.tan();
        let tan_angle_right = fov.angle_right.tan();
        let tan_angle_down = fov.angle_down.tan();
        let tan_angle_up = fov.angle_up.tan();
        let tan_angle_width = tan_angle_right - tan_angle_left;
        // Set to tanAngleDown - tanAngleUp for a clip space with positive Y
        // down (Vulkan). Set to tanAngleUp - tanAngleDown for a clip space with
        // positive Y up (OpenGL / D3D / Metal).
        // const float tanAngleHeight =
        //     graphicsApi == GRAPHICS_VULKAN ? (tanAngleDown - tanAngleUp) : (tanAngleUp - tanAngleDown);
        let tan_angle_height = if is_vulkan_api {
            tan_angle_down - tan_angle_up
        } else {
            tan_angle_up - tan_angle_down
        };
        // Set to nearZ for a [-1,1] Z clip space (OpenGL / OpenGL ES).
        // Set to zero for a [0,1] Z clip space (Vulkan / D3D / Metal).
        // const float offsetZ =
        //     (graphicsApi == GRAPHICS_OPENGL || graphicsApi == GRAPHICS_OPENGL_ES) ? nearZ : 0;
        // FIXME handle enum of graphics apis
        let offset_z = 0.;
        // `cols` is laid out column-major for Mat4::from_cols_array:
        // cols[0..4] is column 0, cols[4..8] is column 1, and so on.
        let mut cols: [f32; 16] = [0.0; 16];
        if far_z <= near_z {
            // place the far plane at infinity
            cols[0] = 2. / tan_angle_width;
            cols[4] = 0.;
            cols[8] = (tan_angle_right + tan_angle_left) / tan_angle_width;
            cols[12] = 0.;
            cols[1] = 0.;
            cols[5] = 2. / tan_angle_height;
            cols[9] = (tan_angle_up + tan_angle_down) / tan_angle_height;
            cols[13] = 0.;
            cols[2] = 0.;
            cols[6] = 0.;
            cols[10] = -1.;
            cols[14] = -(near_z + offset_z);
            cols[3] = 0.;
            cols[7] = 0.;
            cols[11] = -1.;
            cols[15] = 0.;
            // bevy uses the _reverse_ infinite projection
            // https://dev.theomader.com/depth-precision/
            // The extra multiply remaps depth so near lands at 1 and infinity
            // at 0 (reversed-Z), as the rest of the pipeline expects.
            let z_reversal = Mat4::from_cols_array_2d(&[
                [1f32, 0., 0., 0.],
                [0., 1., 0., 0.],
                [0., 0., -1., 0.],
                [0., 0., 1., 1.],
            ]);
            return z_reversal * Mat4::from_cols_array(&cols);
        } else {
            // normal projection
            // NOTE(review): unreachable while `far_z` is fixed at -1. above;
            // this finite-far matrix is NOT z-reversed — re-check the depth
            // convention before re-enabling `far_z = self.far`.
            cols[0] = 2. / tan_angle_width;
            cols[4] = 0.;
            cols[8] = (tan_angle_right + tan_angle_left) / tan_angle_width;
            cols[12] = 0.;
            cols[1] = 0.;
            cols[5] = 2. / tan_angle_height;
            cols[9] = (tan_angle_up + tan_angle_down) / tan_angle_height;
            cols[13] = 0.;
            cols[2] = 0.;
            cols[6] = 0.;
            cols[10] = -(far_z + offset_z) / (far_z - near_z);
            cols[14] = -(far_z * (near_z + offset_z)) / (far_z - near_z);
            cols[3] = 0.;
            cols[7] = 0.;
            cols[11] = -1.;
            cols[15] = 0.;
        }
        Mat4::from_cols_array(&cols)
    }
    /// No-op: window size does not affect the XR projection — the FOV is
    /// supplied by the OpenXR runtime instead.
    fn update(&mut self, _width: f32, _height: f32) {}
    /// Far-plane distance reported to Bevy (presumably used for culling —
    /// TODO confirm), even though the matrix itself uses an infinite far plane.
    fn far(&self) -> f32 {
        self.far
    }
}
use bevy::render::camera::CameraProjectionPlugin;
use bevy::render::view::{update_frusta, VisibilitySystems};
use bevy::transform::TransformSystem;
use bevy::{prelude::*, render::camera::RenderTarget}; use bevy::{prelude::*, render::camera::RenderTarget};
use bevy::prelude::Component;
use bevy::render::camera::Viewport;
use bevy_openxr::input::XrInput; use bevy_openxr::input::XrInput;
use bevy_openxr::resources::{XrInstance, XrSession, XrViews}; use bevy_openxr::resources::{XrFrameState, XrSession, XrViews};
use bevy_openxr::{DefaultXrPlugins, LEFT_XR_TEXTURE_HANDLE, RIGHT_XR_TEXTURE_HANDLE};
use openxr::ActiveActionSet;
fn main() { fn main() {
color_eyre::install().unwrap();
info!("Running `openxr-6dof` skill");
App::new() App::new()
.add_plugins(DefaultXrPlugins) .add_plugins(DefaultXrPlugins)
.add_plugins(CameraProjectionPlugin::<XRProjection>::default())
.add_systems(Startup, setup) .add_systems(Startup, setup)
.add_systems(Update, head_movement) .add_systems(PreUpdate, head_movement)
.add_systems(PreUpdate, hands)
.add_systems(
PostUpdate,
update_frusta::<XRProjection>
.after(TransformSystem::TransformPropagate)
.before(VisibilitySystems::UpdatePerspectiveFrusta),
)
.run(); .run();
} }
@@ -21,7 +222,6 @@ enum CameraType {
Middle, Middle,
} }
/// set up a simple 3D scene /// set up a simple 3D scene
fn setup( fn setup(
mut commands: Commands, mut commands: Commands,
@@ -36,7 +236,7 @@ fn setup(
}); });
// cube // cube
commands.spawn(PbrBundle { commands.spawn(PbrBundle {
mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })), mesh: meshes.add(Mesh::from(shape::Cube { size: 0.1 })),
material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()), material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()),
transform: Transform::from_xyz(0.0, 0.5, 0.0), transform: Transform::from_xyz(0.0, 0.5, 0.0),
..default() ..default()
@@ -52,19 +252,19 @@ fn setup(
..default() ..default()
}); });
// camera // camera
commands.spawn((Camera3dBundle { commands.spawn((
transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), Camera3dBundle {
transform: Transform::from_xyz(-2.0, 2.5, 5.0)
.looking_at(Vec3::ZERO, Vec3::Y),
..default() ..default()
}, CameraType::Middle)); },
CameraType::Middle,
));
// let viewport = Viewport{ commands.spawn((
// physical_position: Default::default(), XrCameraBundle {
// physical_size: UVec2::splat(2000), transform: Transform::from_xyz(-2.0, 2.5, 5.0)
// depth: 0.0..1.0, .looking_at(Vec3::ZERO, Vec3::Y),
// };
commands.spawn((Camera3dBundle {
transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
camera: Camera { camera: Camera {
order: -1, order: -1,
target: RenderTarget::TextureView(LEFT_XR_TEXTURE_HANDLE), target: RenderTarget::TextureView(LEFT_XR_TEXTURE_HANDLE),
@@ -72,9 +272,13 @@ fn setup(
..default() ..default()
}, },
..default() ..default()
}, CameraType::Left)); },
commands.spawn((Camera3dBundle { CameraType::Left,
transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), ));
commands.spawn((
XrCameraBundle {
transform: Transform::from_xyz(-2.0, 2.5, 5.0)
.looking_at(Vec3::ZERO, Vec3::Y),
camera: Camera { camera: Camera {
order: -1, order: -1,
target: RenderTarget::TextureView(RIGHT_XR_TEXTURE_HANDLE), target: RenderTarget::TextureView(RIGHT_XR_TEXTURE_HANDLE),
@@ -82,25 +286,65 @@ fn setup(
..default() ..default()
}, },
..default() ..default()
}, CameraType::Right)); },
CameraType::Right,
));
} }
fn head_movement(views: ResMut<XrViews>, mut query: Query<(&mut Transform, &Camera, &CameraType)>) {
/// Draws a debug rectangle gizmo at each hand's tracked pose for the current
/// predicted display time.
fn hands(
    mut gizmos: Gizmos,
    xr_input: Res<XrInput>,
    session: Res<XrSession>,
    frame_state: Res<XrFrameState>,
) {
    //let pose = xr_input.left_action.create_space(Session::clone(&session), Path, Posef::IDENTITY).unwrap();
    // Sync the action set so the space relations queried below return fresh data.
    let act = ActiveActionSet::new(&xr_input.action_set);
    session.sync_actions(&[act]).unwrap();
    // The frame state is only Some while a frame is in flight; `map` is used
    // purely for its side effects here (NOTE(review): `if let Some(..)` would
    // be the more idiomatic spelling).
    frame_state.lock().unwrap().map(|a| {
        //let b = pose.locate(&*xr_input.stage, a.predicted_display_time).unwrap();
        // Left hand: locate its space relative to the stage space.
        let b = xr_input
            .left_space
            .relate(&xr_input.stage, a.predicted_display_time)
            .unwrap();
        gizmos.rect(
            b.0.pose.position.to_vec3(),
            b.0.pose.orientation.to_quat(),
            Vec2::new(0.05, 0.2),
            Color::YELLOW_GREEN,
        );
        // Right hand, same as above.
        let c = xr_input
            .right_space
            .relate(&xr_input.stage, a.predicted_display_time)
            .unwrap();
        gizmos.rect(
            c.0.pose.position.to_vec3(),
            c.0.pose.orientation.to_quat(),
            Vec2::new(0.05, 0.2),
            Color::YELLOW_GREEN,
        )
    });
}
fn head_movement(
views: ResMut<XrViews>,
mut query: Query<(&mut Transform, &mut Camera, &CameraType, &mut XRProjection)>,
) {
let views = views.lock().unwrap(); let views = views.lock().unwrap();
let mut f = || -> Option<()> { let mut f = || -> Option<()> {
let midpoint = (views.get(0)?.pose.position.to_vec3() let midpoint = (views.get(0)?.pose.position.to_vec3()
+ views.get(1)?.pose.position.to_vec3()) + views.get(1)?.pose.position.to_vec3())
/ 2.; / 2.;
for (mut t, _, camera_type) in query.iter_mut() { for (mut t, _, camera_type, _) in query.iter_mut() {
match camera_type { match camera_type {
CameraType::Left => { CameraType::Left => {
t.translation = views.get(0)?.pose.position.to_vec3() t.translation = views.get(0)?.pose.position.to_vec3()
}, }
CameraType::Right => { CameraType::Right => {
t.translation = views.get(1)?.pose.position.to_vec3() t.translation = views.get(1)?.pose.position.to_vec3()
}, }
CameraType::Middle => { CameraType::Middle => {
t.translation = midpoint; t.translation = midpoint;
}, }
} }
} }
let left_rot = views.get(0).unwrap().pose.orientation.to_quat(); let left_rot = views.get(0).unwrap().pose.orientation.to_quat();
@@ -110,34 +354,29 @@ fn head_movement(views: ResMut<XrViews>, mut query: Query<(&mut Transform, &Came
} else { } else {
right_rot.slerp(left_rot, 0.5) right_rot.slerp(left_rot, 0.5)
}; };
for (mut t, _, camera_type) in query.iter_mut() { for (mut t, _, camera_type, _) in query.iter_mut() {
match camera_type { match camera_type {
CameraType::Left => { CameraType::Left => t.rotation = left_rot,
t.rotation = left_rot CameraType::Right => t.rotation = right_rot,
},
CameraType::Right => {
t.rotation = right_rot
},
CameraType::Middle => { CameraType::Middle => {
t.rotation = mid_rot; t.rotation = mid_rot;
}, }
} }
} }
for (mut transform, _cam, camera_type, mut xr_projection) in query.iter_mut() {
let view_idx = match camera_type {
CameraType::Left => 0,
CameraType::Right => 1,
CameraType::Middle => panic!(),
};
let view = views.get(view_idx).unwrap();
xr_projection.fov = view.fov;
// for (mut projection, mut transform, eye) in cam.iter_mut() { transform.rotation = view.pose.orientation.to_quat();
// let view_idx = match eye { let pos = view.pose.position;
// Eye::Left => 0, transform.translation = pos.to_vec3();
// Eye::Right => 1, }
// };
// let view = views.get(view_idx).unwrap();
//
// projection.fov = view.fov;
//
// transform.rotation = view.pose.orientation.to_quat();
// let pos = view.pose.position;
// transform.translation = pos.to_vec3();
// }
Some(()) Some(())
}; };
@@ -161,10 +400,3 @@ impl QuatConv for openxr::Quaternionf {
Quat::from_xyzw(self.x, self.y, self.z, self.w) Quat::from_xyzw(self.x, self.y, self.z, self.w)
} }
} }
// fn head_movement(right_camera: Query<(&mut Transform, &RightCamera), Without<LeftCamera>>, left_camera: Query<(&mut Transform, &LeftCamera), Without<RightCamera>>, xr_input: Res<bevy_openxr::input::XrInput>, instance: Res<XrInstance>, session: Res<XrSession>) {
//
// // let stage =
// // session.create_reference_space(openxr::ReferenceSpaceType::VIEW, openxr::Posef::IDENTITY).unwrap();
// // eprintln!("a: {:#?}", stage.locate(&xr_input.stage, xr_input.action_set.).unwrap().pose);
// }

View File

@@ -1,18 +1,22 @@
mod graphics;
pub mod input; pub mod input;
pub mod resource_macros; pub mod resource_macros;
pub mod resources; pub mod resources;
mod graphics;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use bevy::app::PluginGroupBuilder;
use bevy::ecs::system::SystemState; use bevy::ecs::system::SystemState;
use bevy::prelude::*; use bevy::prelude::*;
use bevy::render::camera::{ManualTextureViews, ManualTextureView, ManualTextureViewHandle}; use bevy::render::camera::{ManualTextureView, ManualTextureViewHandle, ManualTextureViews};
use bevy::render::{FutureRendererResources, RenderPlugin, RenderApp, Render, RenderSet}; use bevy::render::renderer::{RenderAdapterInfo, RenderAdapter, RenderDevice, RenderQueue};
use bevy::render::settings::RenderSettings;
use bevy::render::{Render, RenderApp, RenderPlugin, RenderSet};
use bevy::window::{PrimaryWindow, RawHandleWrapper}; use bevy::window::{PrimaryWindow, RawHandleWrapper};
use input::XrInput; use input::XrInput;
use resources::*;
use openxr as xr; use openxr as xr;
use resources::*;
use wgpu::Instance;
const VIEW_TYPE: xr::ViewConfigurationType = xr::ViewConfigurationType::PRIMARY_STEREO; const VIEW_TYPE: xr::ViewConfigurationType = xr::ViewConfigurationType::PRIMARY_STEREO;
@@ -44,36 +48,44 @@ pub struct FutureXrResources(
impl Plugin for OpenXrPlugin { impl Plugin for OpenXrPlugin {
fn build(&self, app: &mut App) { fn build(&self, app: &mut App) {
let future_renderer_resources_wrapper = Arc::new(Mutex::new(None));
app.insert_resource(FutureRendererResources(
future_renderer_resources_wrapper.clone(),
));
let future_xr_resources_wrapper = Arc::new(Mutex::new(None)); let future_xr_resources_wrapper = Arc::new(Mutex::new(None));
app.insert_resource(FutureXrResources( app.insert_resource(FutureXrResources(future_xr_resources_wrapper.clone()));
future_xr_resources_wrapper.clone()
));
let mut system_state: SystemState<Query<&RawHandleWrapper, With<PrimaryWindow>>> = let mut system_state: SystemState<Query<&RawHandleWrapper, With<PrimaryWindow>>> =
SystemState::new(&mut app.world); SystemState::new(&mut app.world);
let primary_window = system_state.get(&app.world).get_single().ok().cloned(); let primary_window = system_state.get(&app.world).get_single().ok().cloned();
bevy::tasks::IoTaskPool::get() let (
.spawn_local(async move { device,
let (device, queue, adapter_info, render_adapter, instance, xr_instance, session, blend_mode, session_running, frame_waiter, swapchain, input, views, frame_state) = graphics::initialize_xr_graphics(primary_window).unwrap(); queue,
adapter_info,
render_adapter,
instance,
xr_instance,
session,
blend_mode,
session_running,
frame_waiter,
swapchain,
input,
views,
frame_state,
) = graphics::initialize_xr_graphics(primary_window).unwrap();
debug!("Configured wgpu adapter Limits: {:#?}", device.limits()); debug!("Configured wgpu adapter Limits: {:#?}", device.limits());
debug!("Configured wgpu adapter Features: {:#?}", device.features()); debug!("Configured wgpu adapter Features: {:#?}", device.features());
let mut future_renderer_resources_inner =
future_renderer_resources_wrapper.lock().unwrap();
*future_renderer_resources_inner =
Some((device, queue, adapter_info, render_adapter, instance));
let mut future_xr_resources_inner = future_xr_resources_wrapper.lock().unwrap(); let mut future_xr_resources_inner = future_xr_resources_wrapper.lock().unwrap();
*future_xr_resources_inner = *future_xr_resources_inner = Some((
Some((xr_instance, session, blend_mode, session_running, frame_waiter, swapchain, input, views, frame_state)); xr_instance,
}) session,
.detach(); blend_mode,
session_running,
frame_waiter,
swapchain,
input,
views,
frame_state,
));
app.add_plugins(DefaultPlugins.set(RenderPlugin { render_settings: RenderSettings::Manual(device, queue, adapter_info, render_adapter, Mutex::new(instance))}));
} }
fn ready(&self, app: &App) -> bool { fn ready(&self, app: &App) -> bool {
@@ -84,13 +96,20 @@ impl Plugin for OpenXrPlugin {
} }
fn finish(&self, app: &mut App) { fn finish(&self, app: &mut App) {
if let Some(future_renderer_resources) = if let Some(future_renderer_resources) = app.world.remove_resource::<FutureXrResources>() {
app.world.remove_resource::<FutureXrResources>() let (
{ xr_instance,
let (instance, session, blend_mode, session_running, frame_waiter, swapchain, input, views, frame_state) = session,
future_renderer_resources.0.lock().unwrap().take().unwrap(); blend_mode,
session_running,
frame_waiter,
swapchain,
input,
views,
frame_state,
) = future_renderer_resources.0.lock().unwrap().take().unwrap();
app.insert_resource(instance.clone()) app.insert_resource(xr_instance.clone())
.insert_resource(session.clone()) .insert_resource(session.clone())
.insert_resource(blend_mode.clone()) .insert_resource(blend_mode.clone())
.insert_resource(session_running.clone()) .insert_resource(session_running.clone())
@@ -120,7 +139,8 @@ impl Plugin for OpenXrPlugin {
drop(swapchain_mut); drop(swapchain_mut);
let render_app = app.sub_app_mut(RenderApp); let render_app = app.sub_app_mut(RenderApp);
render_app.insert_resource(instance) render_app
.insert_resource(xr_instance)
.insert_resource(session) .insert_resource(session)
.insert_resource(blend_mode) .insert_resource(blend_mode)
.insert_resource(session_running) .insert_resource(session_running)
@@ -130,19 +150,25 @@ impl Plugin for OpenXrPlugin {
.insert_resource(views) .insert_resource(views)
.insert_resource(frame_state); .insert_resource(frame_state);
render_app.add_systems(Render, (pre_frame.in_set(RenderSet::Prepare).before(post_frame), post_frame.in_set(RenderSet::Prepare), post_queue_submit.in_set(RenderSet::Cleanup))); render_app.add_systems(
Render,
(
pre_frame.in_set(RenderSet::Prepare).before(post_frame),
post_frame.in_set(RenderSet::Prepare),
post_queue_submit.in_set(RenderSet::Cleanup),
),
);
} }
} }
} }
pub struct DefaultXrPlugins; pub struct DefaultXrPlugins;
impl PluginGroup for DefaultXrPlugins { impl PluginGroup for DefaultXrPlugins {
fn build(self) -> bevy::app::PluginGroupBuilder { fn build(self) -> PluginGroupBuilder {
DefaultPlugins let mut group = PluginGroupBuilder::start::<Self>();
.build() group = group.add(OpenXrPlugin);
.add_before::<RenderPlugin, _>(OpenXrPlugin) group
} }
} }
@@ -155,7 +181,7 @@ pub fn pre_frame(
swapchain: Res<XrSwapchain>, swapchain: Res<XrSwapchain>,
xr_input: Res<XrInput>, xr_input: Res<XrInput>,
mut manual_texture_views: ResMut<ManualTextureViews>, mut manual_texture_views: ResMut<ManualTextureViews>,
){ ) {
while let Some(event) = instance.poll_event(&mut Default::default()).unwrap() { while let Some(event) = instance.poll_event(&mut Default::default()).unwrap() {
use xr::Event::*; use xr::Event::*;
match event { match event {
@@ -172,15 +198,11 @@ pub fn pre_frame(
session.end().unwrap(); session.end().unwrap();
session_running.store(false, std::sync::atomic::Ordering::Relaxed); session_running.store(false, std::sync::atomic::Ordering::Relaxed);
} }
xr::SessionState::EXITING | xr::SessionState::LOSS_PENDING => { xr::SessionState::EXITING | xr::SessionState::LOSS_PENDING => return,
return
}
_ => {} _ => {}
} }
} }
InstanceLossPending(_) => { InstanceLossPending(_) => return,
return
}
EventsLost(e) => { EventsLost(e) => {
warn!("lost {} XR events", e.lost_event_count()); warn!("lost {} XR events", e.lost_event_count());
} }
@@ -190,7 +212,7 @@ pub fn pre_frame(
if !session_running.load(std::sync::atomic::Ordering::Relaxed) { if !session_running.load(std::sync::atomic::Ordering::Relaxed) {
// Don't grind up the CPU // Don't grind up the CPU
std::thread::sleep(std::time::Duration::from_millis(10)); std::thread::sleep(std::time::Duration::from_millis(10));
return return;
} }
*frame_state.lock().unwrap() = Some(frame_waiter.lock().unwrap().wait().unwrap()); *frame_state.lock().unwrap() = Some(frame_waiter.lock().unwrap().wait().unwrap());
@@ -228,11 +250,18 @@ pub fn post_frame(
session: Res<XrSession>, session: Res<XrSession>,
xr_frame_state: Res<XrFrameState>, xr_frame_state: Res<XrFrameState>,
) { ) {
*views.lock().unwrap() = session.locate_views( *views.lock().unwrap() = session
.locate_views(
VIEW_TYPE, VIEW_TYPE,
xr_frame_state.lock().unwrap().unwrap().predicted_display_time, xr_frame_state
.lock()
.unwrap()
.unwrap()
.predicted_display_time,
&input.stage, &input.stage,
).unwrap().1; )
.unwrap()
.1;
} }
pub fn post_queue_submit( pub fn post_queue_submit(
@@ -245,5 +274,9 @@ pub fn post_queue_submit(
let xr_frame_state = xr_frame_state.lock().unwrap().unwrap(); let xr_frame_state = xr_frame_state.lock().unwrap().unwrap();
let views = &*views.lock().unwrap(); let views = &*views.lock().unwrap();
let stage = &input.stage; let stage = &input.stage;
swapchain.lock().unwrap().post_queue_submit(xr_frame_state, views, stage, **environment_blend_mode).unwrap(); swapchain
.lock()
.unwrap()
.post_queue_submit(xr_frame_state, views, stage, **environment_blend_mode)
.unwrap();
} }