From 453d7419d92fc089c8a3c55e1c9ac344ebd161b3 Mon Sep 17 00:00:00 2001 From: ReinUsesLisp Date: Tue, 14 Apr 2020 01:34:20 -0300 Subject: gl_shader_cache: Use CompileDepth::FullDecompile on GLSL From my testing on a Splatoon 2 shader that takes 3800ms on average to compile, changing to FullDecompile reduces it to 900ms on average. The shader decoder will automatically fall back to a more naive method if it can't use full decompile. --- src/video_core/renderer_opengl/gl_shader_cache.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'src/video_core') diff --git a/src/video_core/renderer_opengl/gl_shader_cache.cpp b/src/video_core/renderer_opengl/gl_shader_cache.cpp index 6d2ff20f9..12c6dcfde 100644 --- a/src/video_core/renderer_opengl/gl_shader_cache.cpp +++ b/src/video_core/renderer_opengl/gl_shader_cache.cpp @@ -34,6 +34,8 @@ namespace OpenGL { using Tegra::Engines::ShaderType; +using VideoCommon::Shader::CompileDepth; +using VideoCommon::Shader::CompilerSettings; using VideoCommon::Shader::ProgramCode; using VideoCommon::Shader::Registry; using VideoCommon::Shader::ShaderIR; @@ -43,7 +45,7 @@ namespace { constexpr u32 STAGE_MAIN_OFFSET = 10; constexpr u32 KERNEL_MAIN_OFFSET = 0; -constexpr VideoCommon::Shader::CompilerSettings COMPILER_SETTINGS{}; +constexpr CompilerSettings COMPILER_SETTINGS{CompileDepth::FullDecompile}; /// Gets the address for the specified shader stage program GPUVAddr GetShaderAddress(Core::System& system, Maxwell::ShaderProgram program) { -- cgit v1.2.3