path: root/src/shader_recompiler/backend
author     ReinUsesLisp <reinuseslisp@airmail.cc>    2021-05-23 09:21:17 +0200
committer  ameerj <52414509+ameerj@users.noreply.github.com>    2021-07-23 03:51:29 +0200
commit     fde47152d924c8137447bc0df38bc17bf7c15137 (patch)
tree       5eca84135a01d96fc6ae0cbc2902d6b9b2090f3b /src/shader_recompiler/backend
parent     spirv: Add OpKill fallback to demote (diff)
Diffstat (limited to 'src/shader_recompiler/backend')
-rw-r--r--  src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp  136
1 file changed, 99 insertions(+), 37 deletions(-)
diff --git a/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp b/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp
index 8849258e3..a6a3f3351 100644
--- a/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp
+++ b/src/shader_recompiler/backend/spirv/emit_spirv_memory.cpp
@@ -8,44 +8,62 @@
namespace Shader::Backend::SPIRV {
namespace {
-Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size) {
+Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size,
+ u32 index_offset = 0) {
if (offset.IsImmediate()) {
- const u32 imm_offset{static_cast<u32>(offset.U32() / element_size)};
+ const u32 imm_offset{static_cast<u32>(offset.U32() / element_size) + index_offset};
return ctx.Const(imm_offset);
}
const u32 shift{static_cast<u32>(std::countr_zero(element_size))};
- const Id index{ctx.Def(offset)};
- if (shift == 0) {
- return index;
+ Id index{ctx.Def(offset)};
+ if (shift != 0) {
+ const Id shift_id{ctx.Const(shift)};
+ index = ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id);
}
- const Id shift_id{ctx.Const(shift)};
- return ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id);
+ if (index_offset != 0) {
+ index = ctx.OpIAdd(ctx.U32[1], index, ctx.Const(index_offset));
+ }
+ return index;
}
Id StoragePointer(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
const StorageTypeDefinition& type_def, size_t element_size,
- Id StorageDefinitions::*member_ptr) {
+ Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
if (!binding.IsImmediate()) {
throw NotImplementedException("Dynamic storage buffer indexing");
}
const Id ssbo{ctx.ssbos[binding.U32()].*member_ptr};
- const Id index{StorageIndex(ctx, offset, element_size)};
+ const Id index{StorageIndex(ctx, offset, element_size, index_offset)};
return ctx.OpAccessChain(type_def.element, ssbo, ctx.u32_zero_value, index);
}
Id LoadStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id result_type,
const StorageTypeDefinition& type_def, size_t element_size,
- Id StorageDefinitions::*member_ptr) {
- const Id pointer{StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr)};
+ Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
+ const Id pointer{
+ StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)};
return ctx.OpLoad(result_type, pointer);
}
+Id LoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+ u32 index_offset = 0) {
+ return LoadStorage(ctx, binding, offset, ctx.U32[1], ctx.storage_types.U32, sizeof(u32),
+ &StorageDefinitions::U32, index_offset);
+}
+
void WriteStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
const StorageTypeDefinition& type_def, size_t element_size,
- Id StorageDefinitions::*member_ptr) {
- const Id pointer{StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr)};
+ Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
+ const Id pointer{
+ StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)};
ctx.OpStore(pointer, value);
}
+
+void WriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
+ u32 index_offset = 0) {
+ WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32, sizeof(u32),
+ &StorageDefinitions::U32, index_offset);
+}
} // Anonymous namespace
void EmitLoadGlobalU8(EmitContext&) {
@@ -105,42 +123,73 @@ void EmitWriteGlobal128(EmitContext& ctx, Id address, Id value) {
}
Id EmitLoadStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return ctx.OpUConvert(ctx.U32[1],
- LoadStorage(ctx, binding, offset, ctx.U8, ctx.storage_types.U8,
- sizeof(u8), &StorageDefinitions::U8));
+ if (ctx.profile.support_descriptor_aliasing) {
+ return ctx.OpUConvert(ctx.U32[1],
+ LoadStorage(ctx, binding, offset, ctx.U8, ctx.storage_types.U8,
+ sizeof(u8), &StorageDefinitions::U8));
+ } else {
+ return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
+ ctx.BitOffset8(offset), ctx.Const(8u));
+ }
}
Id EmitLoadStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return ctx.OpSConvert(ctx.U32[1],
- LoadStorage(ctx, binding, offset, ctx.S8, ctx.storage_types.S8,
- sizeof(s8), &StorageDefinitions::S8));
+ if (ctx.profile.support_descriptor_aliasing) {
+ return ctx.OpSConvert(ctx.U32[1],
+ LoadStorage(ctx, binding, offset, ctx.S8, ctx.storage_types.S8,
+ sizeof(s8), &StorageDefinitions::S8));
+ } else {
+ return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
+ ctx.BitOffset8(offset), ctx.Const(8u));
+ }
}
Id EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return ctx.OpUConvert(ctx.U32[1],
- LoadStorage(ctx, binding, offset, ctx.U16, ctx.storage_types.U16,
- sizeof(u16), &StorageDefinitions::U16));
+ if (ctx.profile.support_descriptor_aliasing) {
+ return ctx.OpUConvert(ctx.U32[1],
+ LoadStorage(ctx, binding, offset, ctx.U16, ctx.storage_types.U16,
+ sizeof(u16), &StorageDefinitions::U16));
+ } else {
+ return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
+ ctx.BitOffset16(offset), ctx.Const(16u));
+ }
}
Id EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return ctx.OpSConvert(ctx.U32[1],
- LoadStorage(ctx, binding, offset, ctx.S16, ctx.storage_types.S16,
- sizeof(s16), &StorageDefinitions::S16));
+ if (ctx.profile.support_descriptor_aliasing) {
+ return ctx.OpSConvert(ctx.U32[1],
+ LoadStorage(ctx, binding, offset, ctx.S16, ctx.storage_types.S16,
+ sizeof(s16), &StorageDefinitions::S16));
+ } else {
+ return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
+ ctx.BitOffset16(offset), ctx.Const(16u));
+ }
}
Id EmitLoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return LoadStorage(ctx, binding, offset, ctx.U32[1], ctx.storage_types.U32, sizeof(u32),
- &StorageDefinitions::U32);
+ return LoadStorage32(ctx, binding, offset);
}
Id EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return LoadStorage(ctx, binding, offset, ctx.U32[2], ctx.storage_types.U32x2, sizeof(u32[2]),
- &StorageDefinitions::U32x2);
+ if (ctx.profile.support_descriptor_aliasing) {
+ return LoadStorage(ctx, binding, offset, ctx.U32[2], ctx.storage_types.U32x2,
+ sizeof(u32[2]), &StorageDefinitions::U32x2);
+ } else {
+ return ctx.OpCompositeConstruct(ctx.U32[2], LoadStorage32(ctx, binding, offset, 0),
+ LoadStorage32(ctx, binding, offset, 1));
+ }
}
Id EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
- return LoadStorage(ctx, binding, offset, ctx.U32[4], ctx.storage_types.U32x4, sizeof(u32[4]),
- &StorageDefinitions::U32x4);
+ if (ctx.profile.support_descriptor_aliasing) {
+ return LoadStorage(ctx, binding, offset, ctx.U32[4], ctx.storage_types.U32x4,
+ sizeof(u32[4]), &StorageDefinitions::U32x4);
+ } else {
+ return ctx.OpCompositeConstruct(ctx.U32[4], LoadStorage32(ctx, binding, offset, 0),
+ LoadStorage32(ctx, binding, offset, 1),
+ LoadStorage32(ctx, binding, offset, 2),
+ LoadStorage32(ctx, binding, offset, 3));
+ }
}
void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -169,20 +218,33 @@ void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::V
void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
Id value) {
- WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32, sizeof(u32),
- &StorageDefinitions::U32);
+ WriteStorage32(ctx, binding, offset, value);
}
void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
Id value) {
- WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x2, sizeof(u32[2]),
- &StorageDefinitions::U32x2);
+ if (ctx.profile.support_descriptor_aliasing) {
+ WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x2, sizeof(u32[2]),
+ &StorageDefinitions::U32x2);
+ } else {
+ for (u32 index = 0; index < 2; ++index) {
+ const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)};
+ WriteStorage32(ctx, binding, offset, element, index);
+ }
+ }
}
void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
Id value) {
- WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x4, sizeof(u32[4]),
- &StorageDefinitions::U32x4);
+ if (ctx.profile.support_descriptor_aliasing) {
+ WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x4, sizeof(u32[4]),
+ &StorageDefinitions::U32x4);
+ } else {
+ for (u32 index = 0; index < 4; ++index) {
+ const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)};
+ WriteStorage32(ctx, binding, offset, element, index);
+ }
+ }
}
} // namespace Shader::Backend::SPIRV
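
Note: when the driver cannot alias one storage buffer binding with several element types (ctx.profile.support_descriptor_aliasing is false), the new else branches above fall back to plain 32-bit accesses: 8/16-bit loads read the containing word and bit-field extract from it, while 64/128-bit loads and stores are decomposed into consecutive 32-bit accesses through the new index_offset parameter. Below is a minimal host-side sketch of the same arithmetic, assuming BitOffset8 yields the byte's bit position within its 32-bit word (that helper's definition is not part of this diff); it is an illustration, not emitter code.

```cpp
#include <cstdint>
#include <vector>

// Sketch of the non-aliasing fallback: the SSBO is only visible as an
// array of 32-bit words, so narrow and wide accesses are built from
// 32-bit loads/stores.
uint32_t LoadU8(const std::vector<uint32_t>& words, uint32_t byte_offset) {
    const uint32_t word = words[byte_offset / 4];    // LoadStorage32(..., index_offset = 0)
    const uint32_t bit = (byte_offset % 4) * 8;      // what BitOffset8(offset) is assumed to yield
    return (word >> bit) & 0xffu;                    // OpBitFieldUExtract(word, bit, 8)
}

uint64_t LoadU64(const std::vector<uint32_t>& words, uint32_t byte_offset) {
    const uint32_t lo = words[byte_offset / 4 + 0];  // LoadStorage32(..., index_offset = 0)
    const uint32_t hi = words[byte_offset / 4 + 1];  // LoadStorage32(..., index_offset = 1)
    return (uint64_t{hi} << 32) | lo;                // OpCompositeConstruct of a uvec2
}

void WriteU64(std::vector<uint32_t>& words, uint32_t byte_offset, uint64_t value) {
    for (uint32_t index = 0; index < 2; ++index) {   // mirrors the loop in EmitWriteStorage64
        words[byte_offset / 4 + index] =
            static_cast<uint32_t>(value >> (32 * index));  // OpCompositeExtract + WriteStorage32
    }
}
```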