Diffstat (limited to 'modules/openxr/openxr_api.cpp')
-rw-r--r-- | modules/openxr/openxr_api.cpp | 448
1 file changed, 273 insertions, 175 deletions
diff --git a/modules/openxr/openxr_api.cpp b/modules/openxr/openxr_api.cpp
index 1fe402341b..40e3ecfefc 100644
--- a/modules/openxr/openxr_api.cpp
+++ b/modules/openxr/openxr_api.cpp
@@ -160,7 +160,7 @@ void OpenXRAPI::OpenXRSwapChainInfo::free() {
 	}
 }
 
-bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
+bool OpenXRAPI::OpenXRSwapChainInfo::acquire(bool &p_should_render) {
 	ERR_FAIL_COND_V(image_acquired, true); // This was not released when it should be, error out and reuse...
 
 	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
@@ -193,10 +193,18 @@ bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
 	XrSwapchainImageWaitInfo swapchain_image_wait_info = {
 		XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, // type
 		nullptr, // next
-		17000000 // timeout in nanoseconds
+		1000000000 // 1s timeout in nanoseconds
 	};
 
-	result = openxr_api->xrWaitSwapchainImage(swapchain, &swapchain_image_wait_info);
+	// Wait for a maximum of 10 seconds before calling it a critical failure...
+	for (int retry = 0; retry < 10; retry++) {
+		result = openxr_api->xrWaitSwapchainImage(swapchain, &swapchain_image_wait_info);
+		if (result != XR_TIMEOUT_EXPIRED) {
+			break;
+		}
+		WARN_PRINT("OpenXR: timed out waiting for swapchain image.");
+	}
+
 	if (!XR_UNQUALIFIED_SUCCESS(result)) {
 		// Make sure end_frame knows we need to submit an empty frame
 		p_should_render = false;
@@ -206,6 +214,8 @@ bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
 			print_line("OpenXR: failed to wait for swapchain image [", openxr_api->get_error_string(result), "]");
 			return false;
 		} else {
+			WARN_PRINT("OpenXR: couldn't wait for swapchain image, but this is not a complete error [" + openxr_api->get_error_string(result) + "]");
+
 			// Make sure to skip trying to acquire the swapchain image in the next frame
 			skip_acquire_swapchain = true;
 			return false;
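The new acquire path swaps the single 17 ms wait for a 1 second wait retried up to ten times. For reference, a minimal standalone sketch of that bounded-wait pattern against the raw OpenXR C API (outside Godot's wrapper, with error handling reduced to a bool) could look like this:

#include <openxr/openxr.h>

// Returns true only on an unqualified XR_SUCCESS; a lingering XR_TIMEOUT_EXPIRED
// after roughly 10 seconds, or any error code, returns false.
static bool wait_for_swapchain_image(XrSwapchain swapchain) {
	XrSwapchainImageWaitInfo wait_info = {
		XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, // type
		nullptr, // next
		1000000000 // timeout of 1 second, in nanoseconds
	};

	XrResult result = XR_TIMEOUT_EXPIRED;
	for (int retry = 0; retry < 10; retry++) {
		result = xrWaitSwapchainImage(swapchain, &wait_info);
		if (result != XR_TIMEOUT_EXPIRED) {
			break; // either success or a real failure, stop retrying
		}
	}

	// XR_UNQUALIFIED_SUCCESS is only true for XR_SUCCESS, not for qualified
	// success codes such as XR_TIMEOUT_EXPIRED.
	return XR_UNQUALIFIED_SUCCESS(result);
}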
@@ -760,21 +770,6 @@ bool OpenXRAPI::load_supported_view_configuration_views(XrViewConfigurationType
 		print_verbose(String("  - recommended render sample count: ") + itos(view_configuration_views[i].recommendedSwapchainSampleCount));
 	}
 
-	// Allocate buffers we'll be populating with view information.
-	views = (XrView *)memalloc(sizeof(XrView) * view_count);
-	ERR_FAIL_NULL_V_MSG(views, false, "OpenXR Couldn't allocate memory for views");
-	memset(views, 0, sizeof(XrView) * view_count);
-
-	projection_views = (XrCompositionLayerProjectionView *)memalloc(sizeof(XrCompositionLayerProjectionView) * view_count);
-	ERR_FAIL_NULL_V_MSG(projection_views, false, "OpenXR Couldn't allocate memory for projection views");
-	memset(projection_views, 0, sizeof(XrCompositionLayerProjectionView) * view_count);
-
-	if (submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
-		depth_views = (XrCompositionLayerDepthInfoKHR *)memalloc(sizeof(XrCompositionLayerDepthInfoKHR) * view_count);
-		ERR_FAIL_NULL_V_MSG(depth_views, false, "OpenXR Couldn't allocate memory for depth views");
-		memset(depth_views, 0, sizeof(XrCompositionLayerDepthInfoKHR) * view_count);
-	}
-
 	return true;
 }
 
@@ -927,6 +922,9 @@ bool OpenXRAPI::setup_play_space() {
 
 	// If we've previously created a play space, clean it up first.
 	if (play_space != XR_NULL_HANDLE) {
+		// TODO Investigate if destroying our play space here is safe,
+		// it may still be used in the rendering thread.
+
 		xrDestroySpace(play_space);
 	}
 	play_space = new_play_space;
@@ -936,7 +934,11 @@ bool OpenXRAPI::setup_play_space() {
 	if (emulating_local_floor) {
 		// We'll use the STAGE space to get the floor height, but we can't do that until
 		// after xrWaitFrame(), so just set this flag for now.
+		// Render state will be updated then.
 		should_reset_emulated_floor_height = true;
+	} else {
+		// Update render state so this play space is used rendering the upcoming frame.
+		set_render_play_space(play_space);
 	}
 
 	return true;
@@ -1016,7 +1018,7 @@ bool OpenXRAPI::reset_emulated_floor_height() {
 		identityPose, // pose
 	};
 
-	result = xrLocateSpace(stage_space, local_space, get_next_frame_time(), &stage_location);
+	result = xrLocateSpace(stage_space, local_space, get_predicted_display_time(), &stage_location);
 
 	xrDestroySpace(local_space);
 	xrDestroySpace(stage_space);
@@ -1042,6 +1044,9 @@ bool OpenXRAPI::reset_emulated_floor_height() {
 	// report that as the reference space to the outside world.
 	reference_space = XR_REFERENCE_SPACE_TYPE_LOCAL_FLOOR_EXT;
 
+	// Update render state so this play space is used rendering the upcoming frame.
+	set_render_play_space(play_space);
+
 	return true;
 }
 
@@ -1136,6 +1141,7 @@ bool OpenXRAPI::obtain_swapchain_formats() {
 }
 
 bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
+	ERR_NOT_ON_RENDER_THREAD_V(false);
 	ERR_FAIL_NULL_V(graphics_extension, false);
 	ERR_FAIL_COND_V(session == XR_NULL_HANDLE, false);
 
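create_main_swapchains() and the render-state functions further down now open with ERR_NOT_ON_RENDER_THREAD / ERR_NOT_ON_RENDER_THREAD_V guards. Those macros are defined in the header rather than in this file; a hypothetical sketch of their shape, assuming the RenderingServer::is_on_render_thread() check this diff also uses in get_recommended_target_size(), might look roughly like:

// Hypothetical sketch only; the real macro definitions live in openxr_api.h.
#define ERR_NOT_ON_RENDER_THREAD                                                        \
	if (unlikely(!RenderingServer::get_singleton()->is_on_render_thread())) {           \
		ERR_PRINT_ONCE("OpenXR: this function must be called from the render thread."); \
		return;                                                                         \
	}

#define ERR_NOT_ON_RENDER_THREAD_V(m_retval)                                            \
	if (unlikely(!RenderingServer::get_singleton()->is_on_render_thread())) {           \
		ERR_PRINT_ONCE("OpenXR: this function must be called from the render thread."); \
		return m_retval;                                                                \
	}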
@@ -1154,12 +1160,12 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
 		as we render 3D content into internal buffers that are copied into
 		the swapchain, we do now have (basic) VRS support
 	*/
-	main_swapchain_size = p_size;
+	render_state.main_swapchain_size = p_size;
 	uint32_t sample_count = 1;
 
 	// We start with our color swapchain...
 	if (color_swapchain_format != 0) {
-		if (!main_swapchains[OPENXR_SWAPCHAIN_COLOR].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_MUTABLE_FORMAT_BIT, color_swapchain_format, main_swapchain_size.width, main_swapchain_size.height, sample_count, view_count)) {
+		if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_MUTABLE_FORMAT_BIT, color_swapchain_format, render_state.main_swapchain_size.width, render_state.main_swapchain_size.height, sample_count, view_count)) {
 			return false;
 		}
 	}
@@ -1169,7 +1175,7 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
 	// - we support our depth layer extension
 	// - we have our spacewarp extension (not yet implemented)
 	if (depth_swapchain_format != 0 && submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
-		if (!main_swapchains[OPENXR_SWAPCHAIN_DEPTH].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, depth_swapchain_format, main_swapchain_size.width, main_swapchain_size.height, sample_count, view_count)) {
+		if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, depth_swapchain_format, render_state.main_swapchain_size.width, render_state.main_swapchain_size.height, sample_count, view_count)) {
 			return false;
 		}
 	}
@@ -1180,36 +1186,36 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
 		// TBD
 	}
 
-	for (uint32_t i = 0; i < view_count; i++) {
-		views[i].type = XR_TYPE_VIEW;
-		views[i].next = nullptr;
-
-		projection_views[i].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
-		projection_views[i].next = nullptr;
-		projection_views[i].subImage.swapchain = main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
-		projection_views[i].subImage.imageArrayIndex = i;
-		projection_views[i].subImage.imageRect.offset.x = 0;
-		projection_views[i].subImage.imageRect.offset.y = 0;
-		projection_views[i].subImage.imageRect.extent.width = main_swapchain_size.width;
-		projection_views[i].subImage.imageRect.extent.height = main_swapchain_size.height;
-
-		if (submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available() && depth_views) {
-			projection_views[i].next = &depth_views[i];
-
-			depth_views[i].type = XR_TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR;
-			depth_views[i].next = nullptr;
-			depth_views[i].subImage.swapchain = main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_swapchain();
-			depth_views[i].subImage.imageArrayIndex = i;
-			depth_views[i].subImage.imageRect.offset.x = 0;
-			depth_views[i].subImage.imageRect.offset.y = 0;
-			depth_views[i].subImage.imageRect.extent.width = main_swapchain_size.width;
-			depth_views[i].subImage.imageRect.extent.height = main_swapchain_size.height;
+	for (uint32_t i = 0; i < render_state.view_count; i++) {
+		render_state.views[i].type = XR_TYPE_VIEW;
+		render_state.views[i].next = nullptr;
+
+		render_state.projection_views[i].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
+		render_state.projection_views[i].next = nullptr;
+		render_state.projection_views[i].subImage.swapchain = render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
+		render_state.projection_views[i].subImage.imageArrayIndex = i;
+		render_state.projection_views[i].subImage.imageRect.offset.x = 0;
+		render_state.projection_views[i].subImage.imageRect.offset.y = 0;
+		render_state.projection_views[i].subImage.imageRect.extent.width = render_state.main_swapchain_size.width;
+		render_state.projection_views[i].subImage.imageRect.extent.height = render_state.main_swapchain_size.height;
+
+		if (render_state.submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available() && render_state.depth_views) {
+			render_state.projection_views[i].next = &render_state.depth_views[i];
+
+			render_state.depth_views[i].type = XR_TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR;
+			render_state.depth_views[i].next = nullptr;
+			render_state.depth_views[i].subImage.swapchain = render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_swapchain();
+			render_state.depth_views[i].subImage.imageArrayIndex = i;
+			render_state.depth_views[i].subImage.imageRect.offset.x = 0;
+			render_state.depth_views[i].subImage.imageRect.offset.y = 0;
+			render_state.depth_views[i].subImage.imageRect.extent.width = render_state.main_swapchain_size.width;
+			render_state.depth_views[i].subImage.imageRect.extent.height = render_state.main_swapchain_size.height;
 
 			// OpenXR spec says that: minDepth < maxDepth.
-			depth_views[i].minDepth = 0.0;
-			depth_views[i].maxDepth = 1.0;
+			render_state.depth_views[i].minDepth = 0.0;
+			render_state.depth_views[i].maxDepth = 1.0;
 
 			// But we can reverse near and far for reverse-Z.
-			depth_views[i].nearZ = 100.0; // Near and far Z will be set to the correct values in fill_projection_matrix
-			depth_views[i].farZ = 0.01;
+			render_state.depth_views[i].nearZ = 100.0; // Near and far Z will be set to the correct values in fill_projection_matrix
+			render_state.depth_views[i].farZ = 0.01;
 		}
 	};
 
@@ -1217,23 +1223,33 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
 };
 
 void OpenXRAPI::destroy_session() {
-	if (running && session != XR_NULL_HANDLE) {
-		xrEndSession(session);
+	// TODO need to figure out if we're still rendering our current frame
+	// in a separate rendering thread and if so,
+	// if we need to wait for completion.
+	// We could be pulling the rug from underneath rendering...
+
+	if (running) {
+		if (session != XR_NULL_HANDLE) {
+			xrEndSession(session);
+		}
+
+		running = false;
+		render_state.running = false;
 	}
 
-	if (views != nullptr) {
-		memfree(views);
-		views = nullptr;
+	if (render_state.views != nullptr) {
+		memfree(render_state.views);
+		render_state.views = nullptr;
 	}
 
-	if (projection_views != nullptr) {
-		memfree(projection_views);
-		projection_views = nullptr;
+	if (render_state.projection_views != nullptr) {
+		memfree(render_state.projection_views);
+		render_state.projection_views = nullptr;
 	}
 
-	if (depth_views != nullptr) {
-		memfree(depth_views);
-		depth_views = nullptr;
+	if (render_state.depth_views != nullptr) {
+		memfree(render_state.depth_views);
+		render_state.depth_views = nullptr;
 	}
 
 	free_main_swapchains();
@@ -1248,6 +1264,7 @@ void OpenXRAPI::destroy_session() {
 	if (play_space != XR_NULL_HANDLE) {
 		xrDestroySpace(play_space);
 		play_space = XR_NULL_HANDLE;
+		render_state.play_space = XR_NULL_HANDLE;
 	}
 	if (view_space != XR_NULL_HANDLE) {
 		xrDestroySpace(view_space);
@@ -1298,6 +1315,7 @@ bool OpenXRAPI::on_state_ready() {
 
 	// we're running
 	running = true;
+	set_render_session_running(true);
 
 	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
 		wrapper->on_state_ready();
@@ -1374,34 +1392,37 @@ bool OpenXRAPI::on_state_stopping() {
 		}
 
 		running = false;
+		set_render_session_running(false);
 	}
 
-	// TODO further cleanup
-
 	return true;
 }
 
 bool OpenXRAPI::on_state_loss_pending() {
 	print_verbose("On state loss pending");
 
+	if (xr_interface) {
+		xr_interface->on_state_loss_pending();
+	}
+
 	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
 		wrapper->on_state_loss_pending();
 	}
 
-	// TODO need to look into the correct action here, read up on the spec but we may need to signal Godot to exit (if it's not already exiting)
-
 	return true;
 }
 
 bool OpenXRAPI::on_state_exiting() {
 	print_verbose("On state existing");
 
+	if (xr_interface) {
+		xr_interface->on_state_exiting();
+	}
+
 	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
 		wrapper->on_state_exiting();
 	}
 
-	// TODO need to look into the correct action here, read up on the spec but we may need to signal Godot to exit (if it's not already exiting)
-
 	return true;
 }
@@ -1419,10 +1440,7 @@ void OpenXRAPI::set_view_configuration(XrViewConfigurationType p_view_configurat
 
 bool OpenXRAPI::set_requested_reference_space(XrReferenceSpaceType p_requested_reference_space) {
 	requested_reference_space = p_requested_reference_space;
-
-	if (is_initialized()) {
-		return setup_play_space();
-	}
+	play_space_is_dirty = true;
 
 	return true;
 }
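The change to set_requested_reference_space() illustrates the deferral pattern this commit uses throughout: setters only record intent, and process() applies it once per frame, after xrWaitFrame() has produced a usable predicted display time. A simplified sketch of that pattern (hypothetical free-standing members, not the actual class layout):

// Sketch only: the setter marks the play space dirty instead of rebuilding it.
bool set_requested_reference_space_sketch(XrReferenceSpaceType p_requested_reference_space) {
	requested_reference_space = p_requested_reference_space;
	play_space_is_dirty = true; // picked up by process(), not applied here
	return true;
}

void process_sketch() {
	// ...xrWaitFrame() has already run by this point...
	if (unlikely(play_space_is_dirty)) {
		setup_play_space(); // recreates the XrSpace and pushes it to the render state
		play_space_is_dirty = false;
	}
}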
@@ -1625,11 +1643,6 @@ bool OpenXRAPI::initialize_session() {
 		return false;
 	}
 
-	if (!setup_play_space()) {
-		destroy_session();
-		return false;
-	}
-
 	if (!setup_view_space()) {
 		destroy_session();
 		return false;
@@ -1645,6 +1658,8 @@ bool OpenXRAPI::initialize_session() {
 		return false;
 	}
 
+	allocate_view_buffers(view_count, submit_depth_buffer);
+
 	return true;
 }
 
@@ -1696,12 +1711,18 @@ XrHandTrackerEXT OpenXRAPI::get_hand_tracker(int p_hand_index) {
 }
 
 Size2 OpenXRAPI::get_recommended_target_size() {
+	RenderingServer *rendering_server = RenderingServer::get_singleton();
 	ERR_FAIL_NULL_V(view_configuration_views, Size2());
 
 	Size2 target_size;
 
-	target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_target_size_multiplier;
-	target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_target_size_multiplier;
+	if (rendering_server && rendering_server->is_on_render_thread()) {
+		target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_state.render_target_size_multiplier;
+		target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_state.render_target_size_multiplier;
+	} else {
+		target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_target_size_multiplier;
+		target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_target_size_multiplier;
+	}
 
 	return target_size;
 }
@@ -1713,14 +1734,12 @@ XRPose::TrackingConfidence OpenXRAPI::get_head_center(Transform3D &r_transform,
 		return XRPose::XR_TRACKING_CONFIDENCE_NONE;
 	}
 
-	// xrWaitFrame not run yet
-	if (frame_state.predictedDisplayTime == 0) {
+	// Get display time
+	XrTime display_time = get_predicted_display_time();
+	if (display_time == 0) {
 		return XRPose::XR_TRACKING_CONFIDENCE_NONE;
 	}
 
-	// Get timing for the next frame, as that is the current frame we're processing
-	XrTime display_time = get_next_frame_time();
-
 	XrSpaceVelocity velocity = {
 		XR_TYPE_SPACE_VELOCITY, // type
 		nullptr, // next
@@ -1764,54 +1783,47 @@ XRPose::TrackingConfidence OpenXRAPI::get_head_center(Transform3D &r_transform,
 }
 
 bool OpenXRAPI::get_view_transform(uint32_t p_view, Transform3D &r_transform) {
-	if (!running) {
-		return false;
-	}
+	ERR_NOT_ON_RENDER_THREAD_V(false);
 
-	// xrWaitFrame not run yet
-	if (frame_state.predictedDisplayTime == 0) {
+	if (!render_state.running) {
 		return false;
 	}
 
 	// we don't have valid view info
-	if (views == nullptr || !view_pose_valid) {
+	if (render_state.views == nullptr || !render_state.view_pose_valid) {
 		return false;
 	}
 
 	// Note, the timing of this is set right before rendering, which is what we need here.
-	r_transform = transform_from_pose(views[p_view].pose);
+	r_transform = transform_from_pose(render_state.views[p_view].pose);
 
 	return true;
 }
 
 bool OpenXRAPI::get_view_projection(uint32_t p_view, double p_z_near, double p_z_far, Projection &p_camera_matrix) {
+	ERR_NOT_ON_RENDER_THREAD_V(false);
 	ERR_FAIL_NULL_V(graphics_extension, false);
 
-	if (!running) {
-		return false;
-	}
-
-	// xrWaitFrame not run yet
-	if (frame_state.predictedDisplayTime == 0) {
+	if (!render_state.running) {
 		return false;
 	}
 
 	// we don't have valid view info
-	if (views == nullptr || !view_pose_valid) {
+	if (render_state.views == nullptr || !render_state.view_pose_valid) {
 		return false;
 	}
 
 	// if we're using depth views, make sure we update our near and far there...
-	if (depth_views != nullptr) {
-		for (uint32_t i = 0; i < view_count; i++) {
+	if (render_state.depth_views != nullptr) {
+		for (uint32_t i = 0; i < render_state.view_count; i++) {
 			// As we are using reverse-Z these need to be flipped.
-			depth_views[i].nearZ = p_z_far;
-			depth_views[i].farZ = p_z_near;
+			render_state.depth_views[i].nearZ = p_z_far;
+			render_state.depth_views[i].farZ = p_z_near;
 		}
 	}
 
 	// now update our projection
-	return graphics_extension->create_projection_fov(views[p_view].fov, p_z_near, p_z_far, p_camera_matrix);
+	return graphics_extension->create_projection_fov(render_state.views[p_view].fov, p_z_near, p_z_far, p_camera_matrix);
 }
@@ -1934,53 +1946,85 @@ bool OpenXRAPI::poll_events() {
 	}
 }
 
-bool OpenXRAPI::process() {
-	ERR_FAIL_COND_V(instance == XR_NULL_HANDLE, false);
+void OpenXRAPI::_allocate_view_buffers(uint32_t p_view_count, bool p_submit_depth_buffer) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
 
-	if (!poll_events()) {
-		return false;
-	}
+	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
+	ERR_FAIL_NULL(openxr_api);
 
-	if (!running) {
-		return false;
-	}
+	openxr_api->render_state.view_count = p_view_count;
+	openxr_api->render_state.submit_depth_buffer = p_submit_depth_buffer;
 
-	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
-		wrapper->on_process();
+	// Allocate buffers we'll be populating with view information.
+	openxr_api->render_state.views = (XrView *)memalloc(sizeof(XrView) * p_view_count);
+	ERR_FAIL_NULL_MSG(openxr_api->render_state.views, "OpenXR Couldn't allocate memory for views");
+	memset(openxr_api->render_state.views, 0, sizeof(XrView) * p_view_count);
+
+	openxr_api->render_state.projection_views = (XrCompositionLayerProjectionView *)memalloc(sizeof(XrCompositionLayerProjectionView) * p_view_count);
+	ERR_FAIL_NULL_MSG(openxr_api->render_state.projection_views, "OpenXR Couldn't allocate memory for projection views");
+	memset(openxr_api->render_state.projection_views, 0, sizeof(XrCompositionLayerProjectionView) * p_view_count);
+
+	if (p_submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
+		openxr_api->render_state.depth_views = (XrCompositionLayerDepthInfoKHR *)memalloc(sizeof(XrCompositionLayerDepthInfoKHR) * p_view_count);
+		ERR_FAIL_NULL_MSG(openxr_api->render_state.depth_views, "OpenXR Couldn't allocate memory for depth views");
+		memset(openxr_api->render_state.depth_views, 0, sizeof(XrCompositionLayerDepthInfoKHR) * p_view_count);
 	}
+}
 
-	return true;
+void OpenXRAPI::_set_render_session_running(bool p_is_running) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
+
+	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
+	ERR_FAIL_NULL(openxr_api);
+	openxr_api->render_state.running = p_is_running;
 }
 
-void OpenXRAPI::free_main_swapchains() {
-	for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
-		main_swapchains[i].queue_free();
-	}
+void OpenXRAPI::_set_render_display_info(XrTime p_predicted_display_time, bool p_should_render) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
+
+	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
+	ERR_FAIL_NULL(openxr_api);
+	openxr_api->render_state.predicted_display_time = p_predicted_display_time;
+	openxr_api->render_state.should_render = p_should_render;
 }
 
-void OpenXRAPI::pre_render() {
-	ERR_FAIL_COND(instance == XR_NULL_HANDLE);
+void OpenXRAPI::_set_render_play_space(uint64_t p_play_space) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
 
-	if (!running) {
-		return;
-	}
+	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
+	ERR_FAIL_NULL(openxr_api);
+	openxr_api->render_state.play_space = XrSpace(p_play_space);
+}
 
-	// Process any swapchains that were queued to be freed
-	OpenXRSwapChainInfo::free_queued();
+void OpenXRAPI::_set_render_state_multiplier(double p_render_target_size_multiplier) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
 
-	Size2i swapchain_size = get_recommended_target_size();
-	if (swapchain_size != main_swapchain_size) {
-		// Out with the old.
-		free_main_swapchains();
+	OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
+	ERR_FAIL_NULL(openxr_api);
+	openxr_api->render_state.render_target_size_multiplier = p_render_target_size_multiplier;
+}
 
-		// In with the new.
-		create_main_swapchains(swapchain_size);
+bool OpenXRAPI::process() {
+	ERR_FAIL_COND_V(instance == XR_NULL_HANDLE, false);
+
+	if (!poll_events()) {
+		return false;
 	}
 
-	// Waitframe does 2 important things in our process:
-	// 1) It provides us with predictive timing, telling us when OpenXR expects to display the frame we're about to commit
-	// 2) It will use the previous timing to pause our thread so that rendering starts as close to displaying as possible
-	// This must thus be called as close to when we start rendering as possible
+	if (!running) {
+		return false;
+	}
+
+	// We call xrWaitFrame as early as possible, this will allow OpenXR to get
+	// proper timing info between this point, and when we're ready to start rendering.
+	// As the name suggests, OpenXR can pause the thread to minimize the time between
+	// retrieving tracking data and using that tracking data to render.
+	// OpenXR thus works best if rendering is performed on a separate thread.
 	XrFrameWaitInfo frame_wait_info = { XR_TYPE_FRAME_WAIT_INFO, nullptr };
 	frame_state.predictedDisplayTime = 0;
 	frame_state.predictedDisplayPeriod = 0;
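The static _set_render_*() functions above only ever run on the rendering thread. Their public counterparts (set_render_session_running(), set_render_play_space(), set_render_display_info(), and so on) are declared in the header and are not visible in this diff; presumably they forward the value across threads, for example along these lines (a sketch that assumes RenderingServer::call_on_render_thread() and callable_mp_static() are used for the hand-off):

// Hypothetical forwarding wrapper; the real declaration lives in openxr_api.h.
void OpenXRAPI::set_render_session_running(bool p_is_running) {
	RenderingServer *rendering_server = RenderingServer::get_singleton();
	ERR_FAIL_NULL(rendering_server);

	// Queue the static setter so render_state.running is only ever written on the
	// rendering thread, matching the ERR_NOT_ON_RENDER_THREAD guard above.
	rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_set_render_session_running).bind(p_is_running));
}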
@@ -1995,7 +2039,9 @@
 		frame_state.predictedDisplayPeriod = 0;
 		frame_state.shouldRender = false;
 
-		return;
+		set_render_display_info(0, false);
+
+		return false;
 	}
 
 	if (frame_state.predictedDisplayPeriod > 500000000) {
@@ -2004,12 +2050,54 @@
 		frame_state.predictedDisplayPeriod = 0;
 	}
 
+	set_render_display_info(frame_state.predictedDisplayTime, frame_state.shouldRender);
+
+	if (unlikely(play_space_is_dirty)) {
+		setup_play_space();
+		play_space_is_dirty = false;
+	}
+
 	if (unlikely(should_reset_emulated_floor_height)) {
 		reset_emulated_floor_height();
 		should_reset_emulated_floor_height = false;
 	}
 
 	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
+		wrapper->on_process();
+	}
+
+	return true;
+}
+
+void OpenXRAPI::free_main_swapchains() {
+	for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
+		render_state.main_swapchains[i].queue_free();
+	}
+}
+
+void OpenXRAPI::pre_render() {
+	ERR_FAIL_COND(session == XR_NULL_HANDLE);
+
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
+
+	if (!render_state.running) {
+		return;
+	}
+
+	// Process any swapchains that were queued to be freed
+	OpenXRSwapChainInfo::free_queued();
+
+	Size2i swapchain_size = get_recommended_target_size();
+	if (swapchain_size != render_state.main_swapchain_size) {
+		// Out with the old.
+		free_main_swapchains();
+
+		// In with the new.
+		create_main_swapchains(swapchain_size);
+	}
+
+	for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
 		wrapper->on_pre_render();
 	}
 
@@ -2028,8 +2116,8 @@ void OpenXRAPI::pre_render() {
 		XR_TYPE_VIEW_LOCATE_INFO, // type
 		nullptr, // next
 		view_configuration, // viewConfigurationType
-		frame_state.predictedDisplayTime, // displayTime
-		play_space // space
+		render_state.predicted_display_time, // displayTime
+		render_state.play_space // space
 	};
 	XrViewState view_state = {
 		XR_TYPE_VIEW_STATE, // type
@@ -2037,7 +2125,7 @@ void OpenXRAPI::pre_render() {
 		0 // viewStateFlags
 	};
 	uint32_t view_count_output;
-	result = xrLocateViews(session, &view_locate_info, &view_state, view_count, &view_count_output, views);
+	XrResult result = xrLocateViews(session, &view_locate_info, &view_state, render_state.view_count, &view_count_output, render_state.views);
 	if (XR_FAILED(result)) {
 		print_line("OpenXR: Couldn't locate views [", get_error_string(result), "]");
 		return;
@@ -2050,9 +2138,9 @@ void OpenXRAPI::pre_render() {
 			pose_valid = false;
 		}
 	}
-	if (view_pose_valid != pose_valid) {
-		view_pose_valid = pose_valid;
-		if (!view_pose_valid) {
+	if (render_state.view_pose_valid != pose_valid) {
+		render_state.view_pose_valid = pose_valid;
+		if (!render_state.view_pose_valid) {
 			print_verbose("OpenXR View pose became invalid");
 		} else {
 			print_verbose("OpenXR View pose became valid");
@@ -2071,23 +2159,24 @@ void OpenXRAPI::pre_render() {
 	}
 
 	// Reset this, we haven't found a viewport for output yet
-	has_xr_viewport = false;
+	render_state.has_xr_viewport = false;
 }
 
 bool OpenXRAPI::pre_draw_viewport(RID p_render_target) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD_V(false);
+
 	// We found an XR viewport!
-	has_xr_viewport = true;
+	render_state.has_xr_viewport = true;
 
-	if (!can_render()) {
+	if (instance == XR_NULL_HANDLE || session == XR_NULL_HANDLE || !render_state.running || !render_state.view_pose_valid || !render_state.should_render) {
 		return false;
 	}
 
-	// TODO: at some point in time we may support multiple viewports in which case we need to handle that...
-
 	// Acquire our images
 	for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
-		if (!main_swapchains[i].is_image_acquired() && main_swapchains[i].get_swapchain() != XR_NULL_HANDLE) {
-			if (!main_swapchains[i].acquire(frame_state.shouldRender)) {
+		if (!render_state.main_swapchains[i].is_image_acquired() && render_state.main_swapchains[i].get_swapchain() != XR_NULL_HANDLE) {
+			if (!render_state.main_swapchains[i].acquire(render_state.should_render)) {
 				return false;
 			}
 		}
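For context, the xrLocateViews call that pre_render() now drives from render_state boils down to the following OpenXR pattern (standalone sketch; the session, spaces, counts and view buffer are assumed valid, and the validity test mirrors what the surrounding code does with view_state.viewStateFlags):

XrViewLocateInfo view_locate_info = {
	XR_TYPE_VIEW_LOCATE_INFO, // type
	nullptr, // next
	XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO, // viewConfigurationType
	predicted_display_time, // displayTime
	play_space // space
};
XrViewState view_state = {
	XR_TYPE_VIEW_STATE, // type
	nullptr, // next
	0 // viewStateFlags
};
uint32_t view_count_output = 0;
XrResult result = xrLocateViews(session, &view_locate_info, &view_state, view_count, &view_count_output, views);

// A view pose is only usable when both orientation and position are flagged valid.
bool pose_valid = XR_SUCCEEDED(result) &&
		(view_state.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT) &&
		(view_state.viewStateFlags & XR_VIEW_STATE_POSITION_VALID_BIT);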
@@ -2101,24 +2190,33 @@ bool OpenXRAPI::pre_draw_viewport(RID p_render_target) {
 }
 
 XrSwapchain OpenXRAPI::get_color_swapchain() {
-	return main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
+	ERR_NOT_ON_RENDER_THREAD_V(XR_NULL_HANDLE);
+
+	return render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
 }
 
 RID OpenXRAPI::get_color_texture() {
-	return main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_image();
+	ERR_NOT_ON_RENDER_THREAD_V(RID());
+
+	return render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_image();
 }
 
 RID OpenXRAPI::get_depth_texture() {
+	ERR_NOT_ON_RENDER_THREAD_V(RID());
+
 	// Note, image will not be acquired if we didn't have a suitable swap chain format.
-	if (submit_depth_buffer) {
-		return main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_image();
+	if (render_state.submit_depth_buffer && render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].is_image_acquired()) {
+		return render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_image();
 	} else {
 		return RID();
 	}
 }
 
 void OpenXRAPI::post_draw_viewport(RID p_render_target) {
-	if (!can_render()) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
+
+	if (instance == XR_NULL_HANDLE || session == XR_NULL_HANDLE || !render_state.running || !render_state.view_pose_valid || !render_state.should_render) {
 		return;
 	}
@@ -2130,30 +2228,33 @@ void OpenXRAPI::end_frame() {
 	XrResult result;
 
-	ERR_FAIL_COND(instance == XR_NULL_HANDLE);
+	ERR_FAIL_COND(session == XR_NULL_HANDLE);
 
-	if (!running) {
+	// Must be called from rendering thread!
+	ERR_NOT_ON_RENDER_THREAD;
+
+	if (!render_state.running) {
 		return;
 	}
 
-	if (frame_state.shouldRender && view_pose_valid) {
-		if (!has_xr_viewport) {
+	if (render_state.should_render && render_state.view_pose_valid) {
+		if (!render_state.has_xr_viewport) {
 			print_line("OpenXR: No viewport was marked with use_xr, there is no rendered output!");
-		} else if (!main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
+		} else if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
 			print_line("OpenXR: No swapchain could be acquired to render to!");
 		}
 	}
 
 	// must have:
-	// - shouldRender set to true
+	// - should_render set to true
 	// - a valid view pose for projection_views[eye].pose to submit layer
 	// - an image to render
-	if (!frame_state.shouldRender || !view_pose_valid || !main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
+	if (!render_state.should_render || !render_state.view_pose_valid || !render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
 		// submit 0 layers when we shouldn't render
 		XrFrameEndInfo frame_end_info = {
 			XR_TYPE_FRAME_END_INFO, // type
 			nullptr, // next
-			frame_state.predictedDisplayTime, // displayTime
+			render_state.predicted_display_time, // displayTime
 			environment_blend_mode, // environmentBlendMode
 			0, // layerCount
 			nullptr // layers
 		};
@@ -2170,14 +2271,14 @@ void OpenXRAPI::end_frame() {
 
 	// release our swapchain image if we acquired it
 	for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
-		if (main_swapchains[i].is_image_acquired()) {
-			main_swapchains[i].release();
+		if (render_state.main_swapchains[i].is_image_acquired()) {
+			render_state.main_swapchains[i].release();
 		}
 	}
 
-	for (uint32_t eye = 0; eye < view_count; eye++) {
-		projection_views[eye].fov = views[eye].fov;
-		projection_views[eye].pose = views[eye].pose;
+	for (uint32_t eye = 0; eye < render_state.view_count; eye++) {
+		render_state.projection_views[eye].fov = render_state.views[eye].fov;
+		render_state.projection_views[eye].pose = render_state.views[eye].pose;
 	}
 
 	Vector<OrderedCompositionLayer> ordered_layers_list;
@@ -2210,9 +2311,9 @@ void OpenXRAPI::end_frame() {
 			XR_TYPE_COMPOSITION_LAYER_PROJECTION, // type
 			nullptr, // next
 			layer_flags, // layerFlags
-			play_space, // space
-			view_count, // viewCount
-			projection_views, // views
+			render_state.play_space, // space
+			render_state.view_count, // viewCount
+			render_state.projection_views, // views
 		};
 
 		ordered_layers_list.push_back({ (const XrCompositionLayerBaseHeader *)&projection_layer, 0 });
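The empty-frame branch above follows the standard OpenXR idiom of ending a frame with zero layers when nothing was rendered; stripped of the Godot specifics it is simply this (environment blend mode shown with an example value):

XrFrameEndInfo frame_end_info = {
	XR_TYPE_FRAME_END_INFO, // type
	nullptr, // next
	predicted_display_time, // displayTime
	XR_ENVIRONMENT_BLEND_MODE_OPAQUE, // environmentBlendMode (example value)
	0, // layerCount
	nullptr // layers
};
XrResult result = xrEndFrame(session, &frame_end_info);
if (XR_FAILED(result)) {
	// The frame is dropped; the session keeps running and the next frame retries.
}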
@@ -2228,7 +2329,7 @@ void OpenXRAPI::end_frame() {
 	XrFrameEndInfo frame_end_info = {
 		XR_TYPE_FRAME_END_INFO, // type
 		nullptr, // next
-		frame_state.predictedDisplayTime, // displayTime
+		render_state.predicted_display_time, // displayTime
 		environment_blend_mode, // environmentBlendMode
 		static_cast<uint32_t>(layers_list.size()), // layerCount
 		layers_list.ptr() // layers
@@ -2271,6 +2372,7 @@ double OpenXRAPI::get_render_target_size_multiplier() const {
 
 void OpenXRAPI::set_render_target_size_multiplier(double multiplier) {
 	render_target_size_multiplier = multiplier;
+	set_render_state_multiplier(multiplier);
 }
 
 bool OpenXRAPI::is_foveation_supported() const {
@@ -2414,10 +2516,6 @@ OpenXRAPI::OpenXRAPI() {
 		submit_depth_buffer = GLOBAL_GET("xr/openxr/submit_depth_buffer");
 	}
-
-	// Reset a few things that can't be done in our class definition.
-	frame_state.predictedDisplayTime = 0;
-	frame_state.predictedDisplayPeriod = 0;
 }
 
 OpenXRAPI::~OpenXRAPI() {
@@ -3132,7 +3230,7 @@ XRPose::TrackingConfidence OpenXRAPI::get_action_pose(RID p_action, RID p_tracke
 		return XRPose::XR_TRACKING_CONFIDENCE_NONE;
 	}
 
-	XrTime display_time = get_next_frame_time();
+	XrTime display_time = get_predicted_display_time();
 	if (display_time == 0) {
 		return XRPose::XR_TRACKING_CONFIDENCE_NONE;
 	}
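Both reset_emulated_floor_height() and get_action_pose() now key their space queries off get_predicted_display_time(). The underlying OpenXR call is the same in both cases; as a hedged standalone sketch (spaces and time assumed valid, flag names per the OpenXR spec):

XrSpaceLocation location = {
	XR_TYPE_SPACE_LOCATION, // type
	nullptr, // next
	0, // locationFlags
	{ { 0, 0, 0, 1 }, { 0, 0, 0 } } // pose (identity orientation, zero position)
};
XrResult result = xrLocateSpace(target_space, base_space, predicted_display_time, &location);

// Only trust the result when both orientation and position are flagged valid.
bool usable = XR_SUCCEEDED(result) &&
		(location.locationFlags & XR_SPACE_LOCATION_ORIENTATION_VALID_BIT) &&
		(location.locationFlags & XR_SPACE_LOCATION_POSITION_VALID_BIT);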