This commit is contained in:
Schmarni
2024-02-05 02:27:27 +01:00
parent 5c81f135e7
commit 9704607c8c
6 changed files with 100 additions and 24 deletions

View File

@@ -356,7 +356,7 @@ pub fn start_xr_session(
});
let swapchain_format = surface
.as_ref()
.map(|surface| surface.get_capabilities(&wgpu_adapter).formats[0])
.map(|surface| surface.get_capabilities(wgpu_adapter).formats[0])
.unwrap_or(wgpu::TextureFormat::Rgba8UnormSrgb);
let resolution = uvec2(
@@ -387,6 +387,7 @@ pub fn start_xr_session(
let buffers = images
.into_iter()
.map(|color_image| {
info!("image map swapchain");
let color_image = vk::Image::from_raw(color_image);
let wgpu_hal_texture = unsafe {
<V as Api>::Device::texture_from_raw(
@@ -449,6 +450,7 @@ pub fn start_xr_session(
.into(),
XrInput::new(xr_instance, &session.into_any_graphics())?,
Vec::default().into(),
// Feels wrong to return a FrameState here; we should probably just wait for the next frame instead
xr::FrameState {
predicted_display_time: xr::Time::from_nanos(1),
predicted_display_period: xr::Duration::from_nanos(1),

View File

@@ -154,20 +154,20 @@ impl Plugin for OpenXrPlugin {
.after(RenderSet::ExtractCommands),
// .in_set(RenderSet::Prepare),
);
render_app.add_systems(
Render,
(
locate_views,
xr_input::xr_camera::xr_camera_head_sync,
sync_simple_transforms,
propagate_transforms,
update_cam_views,
)
.chain()
.run_if(xr_only())
// .run_if(xr_render_only())
.in_set(RenderSet::Prepare),
);
// render_app.add_systems(
// Render,
// (
// locate_views,
// xr_input::xr_camera::xr_camera_head_sync_render,
// // sync_simple_transforms,
// // propagate_transforms,
// // update_cam_views,
// )
// .chain()
// .run_if(xr_only())
// // .run_if(xr_render_only())
// .in_set(RenderSet::Prepare),
// );
render_app.add_systems(
Render,
xr_end_frame
@@ -187,6 +187,8 @@ impl Plugin for OpenXrPlugin {
}
}
// NOTE: previously marked as confirmed working, but actually not working —
// the camera doesn't render with the new pose for some reason
fn update_cam_views(mut query: Query<(&mut ExtractedView, &GlobalTransform)>) {
for (mut view, transform) in &mut query {
view.transform = *transform;
@@ -199,7 +201,6 @@ fn xr_skip_frame(
environment_blend_mode: Res<XrEnvironmentBlendMode>,
) {
let swapchain: &Swapchain = &xr_swapchain;
// swapchain.begin().unwrap();
match swapchain {
Swapchain::Vulkan(swap) => {
swap.stream
@@ -344,10 +345,14 @@ pub fn xr_wait_frame(
return;
}
};
frame_state.predicted_display_time = xr::Time::from_nanos(
info!(
"Post Wait Time: {}",
frame_state.predicted_display_time.as_nanos()
+ frame_state.predicted_display_period.as_nanos(),
);
// frame_state.predicted_display_time = xr::Time::from_nanos(
// frame_state.predicted_display_time.as_nanos()
// + frame_state.predicted_display_period.as_nanos(),
// );
info!("Post Frame Wait");
**should_render = frame_state.should_render;
**waited = true;
@@ -406,6 +411,10 @@ pub fn xr_end_frame(
}
{
let _span = info_span!("xr_end_frame").entered();
info!(
"End Frame Time: {}",
xr_frame_state.predicted_display_time.as_nanos()
);
let result = swapchain.end(
xr_frame_state.predicted_display_time,
&views,

View File

@@ -32,6 +32,35 @@ macro_rules! xr_resource_wrapper {
};
}
#[macro_export]
/// Declares a `Copy` newtype `Resource` wrapper around an OpenXR type.
///
/// Unlike `xr_resource_wrapper!`, this variant (judging by its name and its
/// use for `XrFrameState`) does not wire up automatic render-world
/// extraction, so the wrapped type can provide its own `ExtractResource`
/// implementation — TODO confirm against `xr_resource_wrapper!`'s expansion.
macro_rules! xr_resource_wrapper_no_extract {
    ($wrapper_type:ident, $xr_type:ty) => {
        #[derive(
            Clone, Copy, bevy::prelude::Resource, bevy::prelude::Deref, bevy::prelude::DerefMut,
        )]
        pub struct $wrapper_type($xr_type);
        impl $wrapper_type {
            /// Wraps a raw value in the resource newtype.
            pub fn new(value: $xr_type) -> Self {
                Self(value)
            }
        }
        // NOTE: a manual `Deref` impl is unnecessary — `bevy::prelude::Deref`
        // and `DerefMut` are already derived above.
        impl From<$xr_type> for $wrapper_type {
            fn from(value: $xr_type) -> Self {
                Self::new(value)
            }
        }
    };
}
#[macro_export]
macro_rules! xr_arc_resource_wrapper {
($wrapper_type:ident, $xr_type:ty) => {

View File

@@ -4,10 +4,10 @@ use std::sync::Mutex;
use crate::input::XrInput;
// use crate::passthrough::XrPassthroughLayer;
use crate::resource_macros::*;
use crate::xr_init::XrStatus;
use crate::{resource_macros::*, xr_resource_wrapper_no_extract};
use bevy::prelude::*;
use bevy::render::extract_resource::ExtractResourcePlugin;
use bevy::render::extract_resource::{ExtractResource, ExtractResourcePlugin};
use openxr as xr;
xr_resource_wrapper!(XrInstance, xr::Instance);
@@ -15,12 +15,24 @@ xr_resource_wrapper!(XrSession, xr::Session<xr::AnyGraphics>);
xr_resource_wrapper!(XrEnvironmentBlendMode, xr::EnvironmentBlendMode);
xr_resource_wrapper!(XrResolution, UVec2);
xr_resource_wrapper!(XrFormat, wgpu::TextureFormat);
xr_resource_wrapper!(XrFrameState, xr::FrameState);
xr_resource_wrapper_no_extract!(XrFrameState, xr::FrameState);
xr_resource_wrapper!(XrViews, Vec<xr::View>);
xr_arc_resource_wrapper!(XrSessionRunning, AtomicBool);
xr_arc_resource_wrapper!(XrSwapchain, Swapchain);
xr_no_clone_resource_wrapper!(XrFrameWaiter, xr::FrameWaiter);
impl ExtractResource for XrFrameState {
    type Source = Self;

    /// Copies the frame state into the render world, advancing the predicted
    /// display time by one display period so the render app works against the
    /// next frame's predicted time.
    fn extract_resource(source: &Self::Source) -> Self {
        let next_display_time = source.predicted_display_time.as_nanos()
            + source.predicted_display_period.as_nanos();
        let mut extracted = *source;
        extracted.predicted_display_time = xr::Time::from_nanos(next_display_time);
        extracted
    }
}
pub(crate) struct VulkanOXrSessionSetupInfo {
pub(crate) device_ptr: *const c_void,
pub(crate) physical_device_ptr: *const c_void,

View File

@@ -36,7 +36,7 @@ use self::trackers::{
adopt_open_xr_trackers, update_open_xr_controllers, OpenXRLeftEye, OpenXRRightEye,
OpenXRTrackingRoot,
};
use self::xr_camera::{XrCameraType, TransformExtract};
use self::xr_camera::{GlobalTransformExtract, TransformExtract, XrCameraType};
#[derive(Copy, Clone)]
pub struct OpenXrInput {
@@ -87,6 +87,7 @@ impl Plugin for OpenXrInput {
app.add_plugins(ExtractComponentPlugin::<XrCameraType>::default());
app.add_plugins(ExtractComponentPlugin::<XRProjection>::default());
app.add_plugins(ExtractComponentPlugin::<TransformExtract>::default());
app.add_plugins(ExtractComponentPlugin::<GlobalTransformExtract>::default());
}
}

View File

@@ -7,7 +7,7 @@ use bevy::prelude::*;
use bevy::render::camera::{CameraProjection, CameraRenderGraph, RenderTarget};
use bevy::render::extract_component::ExtractComponent;
use bevy::render::primitives::Frustum;
use bevy::render::view::{ColorGrading, VisibleEntities};
use bevy::render::view::{ColorGrading, ExtractedView, VisibleEntities};
use openxr::Fovf;
#[derive(Bundle)]
@@ -79,6 +79,7 @@ impl ExtractComponent for GlobalTransformExtract {
Some(*item)
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub enum Eye {
Left = 0,
@@ -271,7 +272,7 @@ impl CameraProjection for XRProjection {
}
pub fn xr_camera_head_sync(
views: ResMut<crate::resources::XrViews>,
views: Res<crate::resources::XrViews>,
mut query: Query<(&mut Transform, &XrCameraType, &mut XRProjection)>,
) {
//TODO calculate HMD position
@@ -290,3 +291,25 @@ pub fn xr_camera_head_sync(
transform.translation = view.pose.position.to_vec3();
}
}
/// Render-world variant of `xr_camera_head_sync`: currently a WIP stub that
/// resets every XR camera's extracted view transform to the identity.
/// The per-eye pose lookup from `views` is not yet implemented.
pub fn xr_camera_head_sync_render(
    views: Res<crate::resources::XrViews>,
    mut query: Query<(&mut ExtractedView, &XrCameraType)>,
) {
    //TODO calculate HMD position
    for (mut extracted_view, _camera_type) in query.iter_mut() {
        info!("cam update");
        extracted_view.transform = GlobalTransform::IDENTITY;
    }
}