diff --git a/src/core/gpu_thread.cpp b/src/core/gpu_thread.cpp
index 7e26c5716..81e4a5490 100644
--- a/src/core/gpu_thread.cpp
+++ b/src/core/gpu_thread.cpp
@@ -83,6 +83,7 @@ static bool CreateGPUBackendOnThread(GPURenderer renderer, bool upload_vram, Err
 static void DestroyGPUBackendOnThread();
 static void DestroyGPUPresenterOnThread();
 
+static void SetThreadEnabled(bool enabled);
 static void UpdateSettingsOnThread(GPUSettings&& new_settings);
 static void UpdateRunIdle();
 
@@ -136,60 +137,6 @@ void GPUThread::ResetCommandFIFO()
   s_state.command_fifo_read_ptr.store(0, std::memory_order_release);
 }
 
-void GPUThread::Internal::SetThreadEnabled(bool enabled)
-{
-  if (s_state.use_gpu_thread == enabled)
-    return;
-
-  if (s_state.use_gpu_thread)
-  {
-    SyncGPUThread(false);
-    std::atomic_thread_fence(std::memory_order_acquire);
-  }
-
-  // Was anything active?
-  if (!g_gpu_device)
-  {
-    // Thread should be idle. Just reset the FIFO.
-    s_state.use_gpu_thread = enabled;
-    ResetCommandFIFO();
-    return;
-  }
-
-  const bool fullscreen = Host::IsFullscreen();
-  const bool requested_fullscreen_ui = s_state.requested_fullscreen_ui;
-  const std::optional<GPURenderer> requested_renderer = s_state.requested_renderer;
-  std::string serial = s_state.game_serial;
-
-  // Force VRAM download, we're recreating.
-  if (requested_renderer.has_value())
-  {
-    GPUBackendReadVRAMCommand* cmd = GPUBackend::NewReadVRAMCommand();
-    cmd->x = 0;
-    cmd->y = 0;
-    cmd->width = VRAM_WIDTH;
-    cmd->height = VRAM_HEIGHT;
-    PushCommand(cmd);
-  }
-
-  // Shutdown reconfigure.
-  Reconfigure(std::string(), std::nullopt, false, false, false, false, nullptr);
-
-  // Thread should be idle at this point. Reset the FIFO.
-  ResetCommandFIFO();
-
-  // Update state and reconfigure again.
-  s_state.use_gpu_thread = enabled;
-
-  Error error;
-  if (!Reconfigure(std::move(serial), requested_renderer, requested_renderer.has_value(), fullscreen,
-                   requested_fullscreen_ui, true, &error))
-  {
-    ERROR_LOG("Reconfigure failed: {}", error.GetDescription());
-    ReportFatalErrorAndShutdown(fmt::format("Reconfigure failed: {}", error.GetDescription()));
-  }
-}
-
 void GPUThread::Internal::ProcessStartup()
 {
   s_state.thread_spin_time = Timer::ConvertNanosecondsToValue(THREAD_SPIN_TIME_US * 1000.0);
@@ -991,6 +938,60 @@ bool GPUThread::Internal::PresentFrameAndRestoreContext()
   return true;
 }
 
+void GPUThread::SetThreadEnabled(bool enabled)
+{
+  if (s_state.use_gpu_thread == enabled)
+    return;
+
+  if (s_state.use_gpu_thread)
+  {
+    SyncGPUThread(false);
+    std::atomic_thread_fence(std::memory_order_acquire);
+  }
+
+  // Was anything active?
+  if (!g_gpu_device)
+  {
+    // Thread should be idle. Just reset the FIFO.
+    s_state.use_gpu_thread = enabled;
+    ResetCommandFIFO();
+    return;
+  }
+
+  const bool fullscreen = Host::IsFullscreen();
+  const bool requested_fullscreen_ui = s_state.requested_fullscreen_ui;
+  const std::optional<GPURenderer> requested_renderer = s_state.requested_renderer;
+  std::string serial = s_state.game_serial;
+
+  // Force VRAM download, we're recreating.
+  if (requested_renderer.has_value())
+  {
+    GPUBackendReadVRAMCommand* cmd = GPUBackend::NewReadVRAMCommand();
+    cmd->x = 0;
+    cmd->y = 0;
+    cmd->width = VRAM_WIDTH;
+    cmd->height = VRAM_HEIGHT;
+    PushCommand(cmd);
+  }
+
+  // Shutdown reconfigure.
+  Reconfigure(std::string(), std::nullopt, false, false, false, false, nullptr);
+
+  // Thread should be idle at this point. Reset the FIFO.
+  ResetCommandFIFO();
+
+  // Update state and reconfigure again.
+  s_state.use_gpu_thread = enabled;
+
+  Error error;
+  if (!Reconfigure(std::move(serial), requested_renderer, requested_renderer.has_value(), fullscreen,
+                   requested_fullscreen_ui, true, &error))
+  {
+    ERROR_LOG("Reconfigure failed: {}", error.GetDescription());
+    ReportFatalErrorAndShutdown(fmt::format("Reconfigure failed: {}", error.GetDescription()));
+  }
+}
+
 void GPUThread::UpdateSettingsOnThread(GPUSettings&& new_settings)
 {
   VERBOSE_LOG("Updating GPU settings on thread...");
@@ -1082,9 +1083,15 @@ void GPUThread::EndASyncBufferCall(GPUThreadCommand* cmd)
   PushCommand(cmd);
 }
 
-void GPUThread::UpdateSettings(bool gpu_settings_changed, bool device_settings_changed)
+void GPUThread::UpdateSettings(bool gpu_settings_changed, bool device_settings_changed, bool thread_changed)
 {
-  if (device_settings_changed)
+  // thread should be a device setting
+  if (thread_changed)
+  {
+    DebugAssert(device_settings_changed);
+    SetThreadEnabled(g_settings.gpu_use_thread);
+  }
+  else if (device_settings_changed)
   {
     INFO_LOG("Reconfiguring after device settings changed.");
 
diff --git a/src/core/gpu_thread.h b/src/core/gpu_thread.h
index 9914adfca..32921a997 100644
--- a/src/core/gpu_thread.h
+++ b/src/core/gpu_thread.h
@@ -29,7 +29,7 @@ struct GPUBackendUpdateDisplayCommand;
 namespace GPUThread {
 using AsyncCallType = std::function<void()>;
 using AsyncBackendCallType = std::function<void(GPUBackend*)>;
-using AsyncBufferCallType = void(*)(void*);
+using AsyncBufferCallType = void (*)(void*);
 
 enum class RunIdleReason : u8
 {
@@ -66,7 +66,7 @@ void ResizeDisplayWindow(s32 width, s32 height, float scale);
 /// Access to main window size from CPU thread.
 const WindowInfo& GetRenderWindowInfo();
 
-void UpdateSettings(bool gpu_settings_changed, bool device_settings_changed);
+void UpdateSettings(bool gpu_settings_changed, bool device_settings_changed, bool thread_changed);
 
 /// Triggers an abnormal system shutdown and waits for it to destroy the backend.
 void ReportFatalErrorAndShutdown(std::string_view reason);
@@ -95,7 +95,6 @@ void SyncGPUThread(bool spin);
 namespace Internal {
 const Threading::ThreadHandle& GetThreadHandle();
 void ProcessStartup();
-void SetThreadEnabled(bool enabled);
 void DoRunIdle();
 void RequestShutdown();
 void GPUThreadEntryPoint();
diff --git a/src/core/hotkeys.cpp b/src/core/hotkeys.cpp
index 398166fb0..f5f1b5f7d 100644
--- a/src/core/hotkeys.cpp
+++ b/src/core/hotkeys.cpp
@@ -61,7 +61,7 @@ static void HotkeyModifyResolutionScale(s32 increment)
 
   if (System::IsValid())
   {
-    GPUThread::UpdateSettings(true, false);
+    GPUThread::UpdateSettings(true, false, false);
     System::ClearMemorySaveStates(true, false);
   }
 }
@@ -136,7 +136,7 @@ static void HotkeyToggleOSD()
   g_settings.display_show_inputs ^= Host::GetBoolSettingValue("Display", "ShowInputs", false);
   g_settings.display_show_enhancements ^= Host::GetBoolSettingValue("Display", "ShowEnhancements", false);
 
-  GPUThread::UpdateSettings(true, false);
+  GPUThread::UpdateSettings(true, false, false);
 }
 
 #ifndef __ANDROID__
@@ -375,7 +375,7 @@ DEFINE_HOTKEY("TogglePGXP", TRANSLATE_NOOP("Hotkeys", "Graphics"), TRANSLATE_NOO
                 System::ClearMemorySaveStates(true, true);
 
                 g_settings.gpu_pgxp_enable = !g_settings.gpu_pgxp_enable;
-                GPUThread::UpdateSettings(true, false);
+                GPUThread::UpdateSettings(true, false, false);
 
                 Host::AddKeyedOSDMessage("TogglePGXP", g_settings.gpu_pgxp_enable ?
@@ -442,7 +442,7 @@ DEFINE_HOTKEY("TogglePGXPDepth", TRANSLATE_NOOP("Hotkeys", "Graphics"),
                 System::ClearMemorySaveStates(true, true);
 
                 g_settings.gpu_pgxp_depth_buffer = !g_settings.gpu_pgxp_depth_buffer;
-                GPUThread::UpdateSettings(true, false);
+                GPUThread::UpdateSettings(true, false, false);
 
                 Host::AddKeyedOSDMessage("TogglePGXPDepth", g_settings.gpu_pgxp_depth_buffer ?
diff --git a/src/core/settings.cpp b/src/core/settings.cpp
index bf24713a1..b5d0b3ab7 100644
--- a/src/core/settings.cpp
+++ b/src/core/settings.cpp
@@ -1102,7 +1102,8 @@ void Settings::FixIncompatibleSettings(const SettingsInterface& si, bool display
 
 bool Settings::AreGPUDeviceSettingsChanged(const Settings& old_settings) const
 {
-  return (gpu_adapter != old_settings.gpu_adapter || gpu_use_debug_device != old_settings.gpu_use_debug_device ||
+  return (gpu_adapter != old_settings.gpu_adapter || gpu_use_thread != old_settings.gpu_use_thread ||
+          gpu_use_debug_device != old_settings.gpu_use_debug_device ||
           gpu_disable_shader_cache != old_settings.gpu_disable_shader_cache ||
           gpu_disable_dual_source_blend != old_settings.gpu_disable_dual_source_blend ||
           gpu_disable_framebuffer_fetch != old_settings.gpu_disable_framebuffer_fetch ||
diff --git a/src/core/system.cpp b/src/core/system.cpp
index e348289c5..c0a41e69d 100644
--- a/src/core/system.cpp
+++ b/src/core/system.cpp
@@ -170,7 +170,6 @@ static void ClearRunningGame();
 static void DestroySystem();
 
 static void RecreateGPU(GPURenderer new_renderer);
-static void SetGPUThreadEnabled(bool enabled);
 static std::string GetScreenshotPath(const char* extension);
 static bool StartMediaCapture(std::string path, bool capture_video, bool capture_audio, u32 video_width,
                               u32 video_height);
@@ -1204,27 +1203,6 @@ void System::RecreateGPU(GPURenderer renderer)
   GPUThread::PresentCurrentFrame();
 }
 
-void System::SetGPUThreadEnabled(bool enabled)
-{
-  // can be called without valid system
-  if (!IsValid())
-  {
-    GPUThread::Internal::SetThreadEnabled(g_settings.gpu_use_thread);
-    return;
-  }
-
-  FreeMemoryStateStorage(false, true, false);
-  StopMediaCapture();
-
-  GPUThread::Internal::SetThreadEnabled(g_settings.gpu_use_thread);
-
-  ClearMemorySaveStates(true, false);
-
-  g_gpu.UpdateDisplay(false);
-  if (IsPaused())
-    GPUThread::PresentCurrentFrame();
-}
-
 void System::LoadSettings(bool display_osd_messages)
 {
   std::unique_lock lock = Host::GetSettingsLock();
@@ -4497,7 +4475,7 @@ void System::CheckForSettingsChanges(const Settings& old_settings)
       g_settings.runahead_frames != old_settings.runahead_frames ||
       g_settings.texture_replacements != old_settings.texture_replacements)
   {
-    GPUThread::UpdateSettings(true, false);
+    GPUThread::UpdateSettings(true, false, false);
 
     // NOTE: Must come after the GPU thread settings update, otherwise it allocs the wrong size textures.
     const bool use_existing_textures = (g_settings.gpu_resolution_scale == old_settings.gpu_resolution_scale);
@@ -4524,13 +4502,29 @@ void System::CheckForSettingsChanges(const Settings& old_settings)
       g_settings.display_screenshot_format != old_settings.display_screenshot_format ||
       g_settings.display_screenshot_quality != old_settings.display_screenshot_quality)
   {
-    // don't need to represent when paused
-    GPUThread::UpdateSettings(true, device_settings_changed);
+    if (device_settings_changed)
+    {
+      // device changes are super icky, we need to purge and recreate any rewind states
+      FreeMemoryStateStorage(false, true, false);
+      StopMediaCapture();
+      GPUThread::UpdateSettings(true, true, g_settings.gpu_use_thread != old_settings.gpu_use_thread);
+      ClearMemorySaveStates(true, false);
+
+      // and display the current frame on the new device
+      g_gpu.UpdateDisplay(false);
+      if (IsPaused())
+        GPUThread::PresentCurrentFrame();
+    }
+    else
+    {
+      // don't need to represent here, because the OSD isn't visible while paused anyway
+      GPUThread::UpdateSettings(true, false, false);
+    }
   }
   else
   {
     // still need to update debug windows
-    GPUThread::UpdateSettings(false, false);
+    GPUThread::UpdateSettings(false, false, false);
   }
 
   if (g_settings.gpu_widescreen_hack != old_settings.gpu_widescreen_hack ||
@@ -4640,7 +4634,7 @@ void System::CheckForSettingsChanges(const Settings& old_settings)
   {
     // handle device setting updates as well
     if (g_settings.gpu_renderer != old_settings.gpu_renderer || g_settings.AreGPUDeviceSettingsChanged(old_settings))
-      GPUThread::UpdateSettings(false, true);
+      GPUThread::UpdateSettings(false, true, g_settings.gpu_use_thread != old_settings.gpu_use_thread);
 
     if (g_settings.display_vsync != old_settings.display_vsync ||
         g_settings.display_disable_mailbox_presentation != old_settings.display_disable_mailbox_presentation)
@@ -4675,15 +4669,8 @@ void System::CheckForSettingsChanges(const Settings& old_settings)
     }
   }
 
-  if (g_settings.gpu_use_thread != old_settings.gpu_use_thread) [[unlikely]]
-  {
-    SetGPUThreadEnabled(g_settings.gpu_use_thread);
-  }
-  else if (g_settings.gpu_use_thread && g_settings.gpu_max_queued_frames != old_settings.gpu_max_queued_frames)
-    [[unlikely]]
-  {
+  if (g_settings.gpu_use_thread && g_settings.gpu_max_queued_frames != old_settings.gpu_max_queued_frames) [[unlikely]]
     GPUThread::SyncGPUThread(false);
-  }
 }
 
 void System::SetTaintsFromSettings()