From 14ac0c2923c41df9c6fc4833d2a8e46a6efe5b59 Mon Sep 17 00:00:00 2001
From: ameerj <52414509+ameerj@users.noreply.github.com>
Date: Fri, 24 Dec 2021 20:00:28 -0500
Subject: shader: Add integer attribute get optimization pass

Works around an nvidia driver bug where casting integer attributes to float
and back to an integer always returned 0.
---
 src/shader_recompiler/frontend/ir/opcodes.inc | 1 +
 1 file changed, 1 insertion(+)

(limited to 'src/shader_recompiler/frontend')

diff --git a/src/shader_recompiler/frontend/ir/opcodes.inc b/src/shader_recompiler/frontend/ir/opcodes.inc
index 6929919df..b94ce7406 100644
--- a/src/shader_recompiler/frontend/ir/opcodes.inc
+++ b/src/shader_recompiler/frontend/ir/opcodes.inc
@@ -40,6 +40,7 @@ OPCODE(GetCbufU32, U32, U32,
 OPCODE(GetCbufF32, F32, U32, U32, )
 OPCODE(GetCbufU32x2, U32x2, U32, U32, )
 OPCODE(GetAttribute, F32, Attribute, U32, )
+OPCODE(GetAttributeU32, U32, Attribute, U32, )
 OPCODE(SetAttribute, Void, Attribute, F32, U32, )
 OPCODE(GetAttributeIndexed, F32, U32, U32, )
 OPCODE(SetAttributeIndexed, Void, U32, F32, U32, )
--
cgit v1.2.3
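
Editor's note: the diff shown here only adds the GetAttributeU32 opcode; the
optimization pass named in the subject is not part of this file. The following
is a minimal C++ sketch of the rewrite the commit message describes: when a
float attribute read is immediately bitcast back to an integer, fold the pair
into a direct GetAttributeU32 read so the float round-trip (which the buggy
nvidia driver collapses to 0) never reaches the backend. The Inst/Opcode types
and the pass function below are illustrative stand-ins, not the
shader_recompiler's actual IR API.

#include <cstddef>
#include <vector>

// Stand-in opcode set; only the cases needed for the sketch are listed.
enum class Opcode {
    GetAttribute,     // reads an attribute as F32
    GetAttributeU32,  // reads an attribute as U32 (added by this patch)
    BitCastU32F32,    // reinterprets an F32 value as U32
};

// Stand-in IR instruction: an opcode plus references to its operands.
struct Inst {
    Opcode opcode;
    std::vector<Inst*> args;
    Inst* Arg(std::size_t index) const { return args.at(index); }
};

// Rewrite BitCastU32F32(GetAttribute(attr, vertex)) into
// GetAttributeU32(attr, vertex) for every instruction in the block.
void IntegerAttributeGetPass(std::vector<Inst*>& block) {
    for (Inst* inst : block) {
        if (inst->opcode != Opcode::BitCastU32F32) {
            continue;
        }
        Inst* producer = inst->Arg(0);
        if (producer->opcode != Opcode::GetAttribute) {
            continue;
        }
        // Turn the bitcast into a direct integer attribute read, reusing the
        // attribute/vertex operands of the original GetAttribute.
        inst->opcode = Opcode::GetAttributeU32;
        inst->args = producer->args;
    }
}

With this shape of rewrite, the backends can emit an integer load for the
attribute directly instead of a float load followed by a bitcast, which is
what the driver bug mishandled.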