shaders: Add U64->U32x2 Atomic fallback functions

Commit ad58d7eae7 (parent 11099dda2e) in yuzu-emu/yuzu (https://github.com/yuzu-emu/yuzu.git).
9 changed files with 469 additions and 1 deletion.
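
The change set adds U32x2 variants of the 64-bit atomic operations so that, on backends without Int64 atomic support, a U64 atomic can be lowered to a pair of 32-bit accesses: load both halves, apply the operation, store both halves back, and return the old value. As the LOG_WARNING messages in the hunks below state, this fallback is not atomic. A minimal host-side sketch of the pattern, using hypothetical names rather than code from the commit:

// Illustrative sketch only (hypothetical helper, not part of the commit): a 64-bit
// exchange emulated as two non-atomic 32-bit accesses, mirroring the
// smem[offset>>2] / smem[(offset+4)>>2] fallback the GLSL backend emits below.
// Another thread may observe a half-written value.
#include <array>
#include <cstdint>

using u32 = std::uint32_t;

std::array<u32, 2> NonAtomicExchangeU32x2(u32* memory, u32 byte_offset,
                                          std::array<u32, 2> new_value) {
    u32& lo = memory[byte_offset >> 2];        // low word of the 64-bit value
    u32& hi = memory[(byte_offset + 4) >> 2];  // high word
    const std::array<u32, 2> original{lo, hi}; // non-atomic load of both halves
    lo = new_value[0];                         // non-atomic store, low half
    hi = new_value[1];                         // non-atomic store, high half
    return original;                           // an exchange returns the previous value
}

The GLSL shared-memory hunk and the SPIR-V StorageAtomicU32x2 helper below implement this same load/store split inside their respective emitters; the GLASM backend only declares the new entry points and throws NotImplementedException for now.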

File 1/9: GLASM backend, emit function declarations

@@ -372,6 +372,8 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, ScalarU32 poin
                                 ScalarU32 value);
 void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset,
                                 Register value);
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, ScalarU32 pointer_offset,
+                                  Register value);
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarU32 value);
 void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -412,6 +414,24 @@ void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& b
                             ScalarU32 offset, Register value);
 void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  ScalarU32 offset, Register value);
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               ScalarU32 offset, Register value);
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              ScalarU32 offset, Register value);
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             ScalarU32 offset, Register value);
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              ScalarU32 offset, Register value);
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   ScalarU32 offset, Register value);
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarF32 value);
 void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -448,6 +468,17 @@ void EmitGlobalAtomicAnd64(EmitContext& ctx);
 void EmitGlobalAtomicOr64(EmitContext& ctx);
 void EmitGlobalAtomicXor64(EmitContext& ctx);
 void EmitGlobalAtomicExchange64(EmitContext& ctx);
+void EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicInc32x2(EmitContext& ctx);
+void EmitGlobalAtomicDec32x2(EmitContext& ctx);
+void EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+void EmitGlobalAtomicOr32x2(EmitContext& ctx);
+void EmitGlobalAtomicXor32x2(EmitContext& ctx);
+void EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32(EmitContext& ctx);
 void EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32x2(EmitContext& ctx);

File 2/9: GLASM backend, atomic emit implementations

@@ -311,6 +311,13 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, ScalarU32 poin
     ctx.LongAdd("ATOMS.EXCH.U64 {}.x,{},shared_mem[{}];", inst, value, pointer_offset);
 }
 
+void EmitSharedAtomicExchange32x2([[maybe_unused]] EmitContext& ctx,
+                                  [[maybe_unused]] IR::Inst& inst,
+                                  [[maybe_unused]] ScalarU32 pointer_offset,
+                                  [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarU32 value) {
     Atom(ctx, inst, binding, offset, value, "ADD", "U32");
@@ -411,6 +418,62 @@ void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Val
     Atom(ctx, inst, binding, offset, value, "EXCH", "U64");
 }
 
+void EmitStorageAtomicIAdd32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicSMin32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicUMin32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicSMax32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicUMax32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                               [[maybe_unused]] const IR::Value& binding,
+                               [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicAnd32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                              [[maybe_unused]] const IR::Value& binding,
+                              [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicOr32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                             [[maybe_unused]] const IR::Value& binding,
+                             [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicXor32x2([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
+                              [[maybe_unused]] const IR::Value& binding,
+                              [[maybe_unused]] ScalarU32 offset, [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitStorageAtomicExchange32x2([[maybe_unused]] EmitContext& ctx,
+                                   [[maybe_unused]] IR::Inst& inst,
+                                   [[maybe_unused]] const IR::Value& binding,
+                                   [[maybe_unused]] ScalarU32 offset,
+                                   [[maybe_unused]] Register value) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              ScalarU32 offset, ScalarF32 value) {
     Atom(ctx, inst, binding, offset, value, "ADD", "F32");
@@ -537,6 +600,50 @@ void EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("GLASM instruction");
 }
 
+void EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
+void EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("GLASM instruction");
+}
+
 void EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("GLASM instruction");
 }

File 3/9: GLSL backend, atomic emit implementations

@@ -105,6 +105,13 @@ void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_vi
             pointer_offset, value, pointer_offset, value);
 }
 
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                                  std::string_view value) {
+    LOG_WARNING(Shader_GLSL, "Int64 atomics not supported, fallback to non-atomic");
+    ctx.AddU32x2("{}=uvec2(smem[{}>>2],smem[({}+4)>>2]);", inst, pointer_offset, pointer_offset);
+    ctx.Add("smem[{}>>2]={}.x;smem[({}+4)>>2]={}.y;", pointer_offset, value, pointer_offset, value);
+}
+
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
     ctx.AddU32("{}=atomicAdd({}_ssbo{}[{}>>2],{});", inst, ctx.stage_name, binding.U32(),
@@ -265,6 +272,51 @@ void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Val
                ctx.stage_name, binding.U32(), ctx.var_alloc.Consume(offset), value);
 }
 
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   const IR::Value& offset, std::string_view value) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value) {
     SsboCasFunctionF32(ctx, inst, binding, offset, value, "CasFloatAdd");
@@ -388,6 +440,50 @@ void EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("GLSL Instrucion");
 }
 
+void EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
+void EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("GLSL Instrucion");
+}
+
 void EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("GLSL Instrucion");
 }

File 4/9: GLSL backend, emit function declarations

@@ -442,6 +442,8 @@ void EmitSharedAtomicExchange32(EmitContext& ctx, IR::Inst& inst, std::string_vi
                                 std::string_view value);
 void EmitSharedAtomicExchange64(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
                                 std::string_view value);
+void EmitSharedAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, std::string_view pointer_offset,
+                                  std::string_view value);
 void EmitStorageAtomicIAdd32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicSMin32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -482,6 +484,24 @@ void EmitStorageAtomicXor64(EmitContext& ctx, IR::Inst& inst, const IR::Value& b
                             const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicExchange64(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                                  const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicIAdd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicSMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicUMin32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicSMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicUMax32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                               const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicAnd32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicOr32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                             const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicXor32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                              const IR::Value& offset, std::string_view value);
+void EmitStorageAtomicExchange32x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
+                                   const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicAddF32(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
                              const IR::Value& offset, std::string_view value);
 void EmitStorageAtomicAddF16x2(EmitContext& ctx, IR::Inst& inst, const IR::Value& binding,
@@ -518,6 +538,17 @@ void EmitGlobalAtomicAnd64(EmitContext& ctx);
 void EmitGlobalAtomicOr64(EmitContext& ctx);
 void EmitGlobalAtomicXor64(EmitContext& ctx);
 void EmitGlobalAtomicExchange64(EmitContext& ctx);
+void EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+void EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+void EmitGlobalAtomicInc32x2(EmitContext& ctx);
+void EmitGlobalAtomicDec32x2(EmitContext& ctx);
+void EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+void EmitGlobalAtomicOr32x2(EmitContext& ctx);
+void EmitGlobalAtomicXor32x2(EmitContext& ctx);
+void EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32(EmitContext& ctx);
 void EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 void EmitGlobalAtomicAddF32x2(EmitContext& ctx);

File 5/9: SPIR-V backend, atomic emit implementations

@@ -82,6 +82,17 @@ Id StorageAtomicU64(EmitContext& ctx, const IR::Value& binding, const IR::Value&
     ctx.OpStore(pointer, ctx.OpBitcast(ctx.U32[2], result));
     return original_value;
 }
+
+Id StorageAtomicU32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
+                      Id (Sirit::Module::*non_atomic_func)(Id, Id, Id)) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2,
+                                    binding, offset, sizeof(u32[2]))};
+    const Id original_value{ctx.OpLoad(ctx.U32[2], pointer)};
+    const Id result{(ctx.*non_atomic_func)(ctx.U32[2], value, original_value)};
+    ctx.OpStore(pointer, result);
+    return original_value;
+}
 } // Anonymous namespace
 
 Id EmitSharedAtomicIAdd32(EmitContext& ctx, Id offset, Id value) {
@@ -141,7 +152,7 @@ Id EmitSharedAtomicExchange64(EmitContext& ctx, Id offset, Id value) {
         const auto [scope, semantics]{AtomicArgs(ctx)};
         return ctx.OpAtomicExchange(ctx.U64, pointer, scope, semantics, value);
     }
-    LOG_ERROR(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
     const Id pointer_1{SharedPointer(ctx, offset, 0)};
     const Id pointer_2{SharedPointer(ctx, offset, 1)};
     const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)};
@@ -152,6 +163,18 @@ Id EmitSharedAtomicExchange64(EmitContext& ctx, Id offset, Id value) {
     return ctx.OpBitcast(ctx.U64, ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2));
 }
 
+Id EmitSharedAtomicExchange32x2(EmitContext& ctx, Id offset, Id value) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer_1{SharedPointer(ctx, offset, 0)};
+    const Id pointer_2{SharedPointer(ctx, offset, 1)};
+    const Id value_1{ctx.OpLoad(ctx.U32[1], pointer_1)};
+    const Id value_2{ctx.OpLoad(ctx.U32[1], pointer_2)};
+    const Id new_vector{ctx.OpBitcast(ctx.U32[2], value)};
+    ctx.OpStore(pointer_1, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 0U));
+    ctx.OpStore(pointer_2, ctx.OpCompositeExtract(ctx.U32[1], new_vector, 1U));
+    return ctx.OpCompositeConstruct(ctx.U32[2], value_1, value_2);
+}
+
 Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value) {
     return StorageAtomicU32(ctx, binding, offset, value, &Sirit::Module::OpAtomicIAdd);
@@ -275,6 +298,56 @@ Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const
     return original;
 }
 
+Id EmitStorageAtomicIAdd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpIAdd);
+}
+
+Id EmitStorageAtomicSMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpSMin);
+}
+
+Id EmitStorageAtomicUMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpUMin);
+}
+
+Id EmitStorageAtomicSMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpSMax);
+}
+
+Id EmitStorageAtomicUMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpUMax);
+}
+
+Id EmitStorageAtomicAnd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseAnd);
+}
+
+Id EmitStorageAtomicOr32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                           Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseOr);
+}
+
+Id EmitStorageAtomicXor32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value) {
+    return StorageAtomicU32x2(ctx, binding, offset, value, &Sirit::Module::OpBitwiseXor);
+}
+
+Id EmitStorageAtomicExchange32x2(EmitContext& ctx, const IR::Value& binding,
+                                 const IR::Value& offset, Id value) {
+    LOG_WARNING(Shader_SPIRV, "Int64 atomics not supported, fallback to non-atomic");
+    const Id pointer{StoragePointer(ctx, ctx.storage_types.U32x2, &StorageDefinitions::U32x2,
+                                    binding, offset, sizeof(u32[2]))};
+    const Id original{ctx.OpLoad(ctx.U32[2], pointer)};
+    ctx.OpStore(pointer, value);
+    return original;
+}
+
 Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value) {
     const Id ssbo{ctx.ssbos[binding.U32()].U32};
@@ -418,6 +491,50 @@ Id EmitGlobalAtomicExchange64(EmitContext&) {
     throw NotImplementedException("SPIR-V Instruction");
 }
 
+Id EmitGlobalAtomicIAdd32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMin32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMin32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicSMax32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicUMax32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicInc32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicDec32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicAnd32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicOr32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicXor32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
+Id EmitGlobalAtomicExchange32x2(EmitContext&) {
+    throw NotImplementedException("SPIR-V Instruction");
+}
+
 Id EmitGlobalAtomicAddF32(EmitContext&) {
     throw NotImplementedException("SPIR-V Instruction");
 }
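
A note on the SPIR-V hunks above: StorageAtomicU32x2 receives the combining operation as a pointer to a Sirit::Module member function (OpIAdd, OpSMin, OpBitwiseAnd, and so on), so the storage 32x2 handlers for IAdd, SMin, UMin, SMax, UMax, And, Or, and Xor share one non-atomic load/modify/store sequence and differ only in the member pointer they forward; EmitStorageAtomicExchange32x2 skips the helper because an exchange needs no combining op. A stripped-down illustration of that pointer-to-member dispatch, with hypothetical stand-in types rather than the real emitter classes:

// Hypothetical stand-ins for Sirit::Module and the emit context, shown only to
// illustrate the (ctx.*non_atomic_func)(...) call syntax used above.
#include <cstdio>

struct Module {
    int OpIAdd(int a, int b) { return a + b; }
    int OpBitwiseAnd(int a, int b) { return a & b; }
};

// The combining operation is selected by a pointer-to-member function.
int Combine(Module& ctx, int (Module::*non_atomic_func)(int, int), int lhs, int rhs) {
    return (ctx.*non_atomic_func)(lhs, rhs); // dispatches to OpIAdd, OpBitwiseAnd, ...
}

int main() {
    Module ctx;
    std::printf("%d\n", Combine(ctx, &Module::OpIAdd, 2, 3));       // prints 5
    std::printf("%d\n", Combine(ctx, &Module::OpBitwiseAnd, 6, 3)); // prints 2
    return 0;
}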

File 6/9: SPIR-V backend, emit function declarations

@@ -335,6 +335,7 @@ Id EmitSharedAtomicOr32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicXor32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicExchange32(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitSharedAtomicExchange64(EmitContext& ctx, Id pointer_offset, Id value);
+Id EmitSharedAtomicExchange32x2(EmitContext& ctx, Id pointer_offset, Id value);
 Id EmitStorageAtomicIAdd32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value);
 Id EmitStorageAtomicSMin32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -375,6 +376,24 @@ Id EmitStorageAtomicXor64(EmitContext& ctx, const IR::Value& binding, const IR::
                           Id value);
 Id EmitStorageAtomicExchange64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                                Id value);
+Id EmitStorageAtomicIAdd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicSMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicUMin32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicSMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicUMax32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                             Id value);
+Id EmitStorageAtomicAnd32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value);
+Id EmitStorageAtomicOr32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                           Id value);
+Id EmitStorageAtomicXor32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
+                            Id value);
+Id EmitStorageAtomicExchange32x2(EmitContext& ctx, const IR::Value& binding,
+                                 const IR::Value& offset, Id value);
 Id EmitStorageAtomicAddF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                            Id value);
 Id EmitStorageAtomicAddF16x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
@@ -411,6 +430,17 @@ Id EmitGlobalAtomicAnd64(EmitContext& ctx);
 Id EmitGlobalAtomicOr64(EmitContext& ctx);
 Id EmitGlobalAtomicXor64(EmitContext& ctx);
 Id EmitGlobalAtomicExchange64(EmitContext& ctx);
+Id EmitGlobalAtomicIAdd32x2(EmitContext& ctx);
+Id EmitGlobalAtomicSMin32x2(EmitContext& ctx);
+Id EmitGlobalAtomicUMin32x2(EmitContext& ctx);
+Id EmitGlobalAtomicSMax32x2(EmitContext& ctx);
+Id EmitGlobalAtomicUMax32x2(EmitContext& ctx);
+Id EmitGlobalAtomicInc32x2(EmitContext& ctx);
+Id EmitGlobalAtomicDec32x2(EmitContext& ctx);
+Id EmitGlobalAtomicAnd32x2(EmitContext& ctx);
+Id EmitGlobalAtomicOr32x2(EmitContext& ctx);
+Id EmitGlobalAtomicXor32x2(EmitContext& ctx);
+Id EmitGlobalAtomicExchange32x2(EmitContext& ctx);
 Id EmitGlobalAtomicAddF32(EmitContext& ctx);
 Id EmitGlobalAtomicAddF16x2(EmitContext& ctx);
 Id EmitGlobalAtomicAddF32x2(EmitContext& ctx);

File 7/9: shader IR, Inst::MayHaveSideEffects opcode list

@@ -118,6 +118,7 @@ bool Inst::MayHaveSideEffects() const noexcept {
    case Opcode::SharedAtomicXor32:
    case Opcode::SharedAtomicExchange32:
    case Opcode::SharedAtomicExchange64:
+   case Opcode::SharedAtomicExchange32x2:
    case Opcode::GlobalAtomicIAdd32:
    case Opcode::GlobalAtomicSMin32:
    case Opcode::GlobalAtomicUMin32:
@@ -138,6 +139,15 @@ bool Inst::MayHaveSideEffects() const noexcept {
    case Opcode::GlobalAtomicOr64:
    case Opcode::GlobalAtomicXor64:
    case Opcode::GlobalAtomicExchange64:
+   case Opcode::GlobalAtomicIAdd32x2:
+   case Opcode::GlobalAtomicSMin32x2:
+   case Opcode::GlobalAtomicUMin32x2:
+   case Opcode::GlobalAtomicSMax32x2:
+   case Opcode::GlobalAtomicUMax32x2:
+   case Opcode::GlobalAtomicAnd32x2:
+   case Opcode::GlobalAtomicOr32x2:
+   case Opcode::GlobalAtomicXor32x2:
+   case Opcode::GlobalAtomicExchange32x2:
    case Opcode::GlobalAtomicAddF32:
    case Opcode::GlobalAtomicAddF16x2:
    case Opcode::GlobalAtomicAddF32x2:
@@ -165,6 +175,15 @@ bool Inst::MayHaveSideEffects() const noexcept {
    case Opcode::StorageAtomicOr64:
    case Opcode::StorageAtomicXor64:
    case Opcode::StorageAtomicExchange64:
+   case Opcode::StorageAtomicIAdd32x2:
+   case Opcode::StorageAtomicSMin32x2:
+   case Opcode::StorageAtomicUMin32x2:
+   case Opcode::StorageAtomicSMax32x2:
+   case Opcode::StorageAtomicUMax32x2:
+   case Opcode::StorageAtomicAnd32x2:
+   case Opcode::StorageAtomicOr32x2:
+   case Opcode::StorageAtomicXor32x2:
+   case Opcode::StorageAtomicExchange32x2:
    case Opcode::StorageAtomicAddF32:
    case Opcode::StorageAtomicAddF16x2:
    case Opcode::StorageAtomicAddF32x2:

File 8/9: shader IR, opcode table

@@ -341,6 +341,7 @@ OPCODE(SharedAtomicOr32, U32, U32,
 OPCODE(SharedAtomicXor32, U32, U32, U32, )
 OPCODE(SharedAtomicExchange32, U32, U32, U32, )
 OPCODE(SharedAtomicExchange64, U64, U32, U64, )
+OPCODE(SharedAtomicExchange32x2, U32x2, U32, U32x2, )
 
 OPCODE(GlobalAtomicIAdd32, U32, U64, U32, )
 OPCODE(GlobalAtomicSMin32, U32, U64, U32, )
@@ -362,6 +363,15 @@ OPCODE(GlobalAtomicAnd64, U64, U64,
 OPCODE(GlobalAtomicOr64, U64, U64, U64, )
 OPCODE(GlobalAtomicXor64, U64, U64, U64, )
 OPCODE(GlobalAtomicExchange64, U64, U64, U64, )
+OPCODE(GlobalAtomicIAdd32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicSMin32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicUMin32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicSMax32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicUMax32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicAnd32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicOr32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicXor32x2, U32x2, U64, U32x2, )
+OPCODE(GlobalAtomicExchange32x2, U32x2, U64, U32x2, )
 OPCODE(GlobalAtomicAddF32, F32, U64, F32, )
 OPCODE(GlobalAtomicAddF16x2, U32, U64, F16x2, )
 OPCODE(GlobalAtomicAddF32x2, U32, U64, F32x2, )
@@ -390,6 +400,15 @@ OPCODE(StorageAtomicAnd64, U64, U32,
 OPCODE(StorageAtomicOr64, U64, U32, U32, U64, )
 OPCODE(StorageAtomicXor64, U64, U32, U32, U64, )
 OPCODE(StorageAtomicExchange64, U64, U32, U32, U64, )
+OPCODE(StorageAtomicIAdd32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicSMin32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicUMin32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicSMax32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicUMax32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicAnd32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicOr32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicXor32x2, U32x2, U32, U32, U32x2, )
+OPCODE(StorageAtomicExchange32x2, U32x2, U32, U32, U32x2, )
 OPCODE(StorageAtomicAddF32, F32, U32, U32, F32, )
 OPCODE(StorageAtomicAddF16x2, U32, U32, U32, F16x2, )
 OPCODE(StorageAtomicAddF32x2, U32, U32, U32, F32x2, )

File 9/9: shader IR optimization, shader info collection pass

@@ -360,6 +360,15 @@ void VisitUsages(Info& info, IR::Inst& inst) {
    case IR::Opcode::GlobalAtomicOr64:
    case IR::Opcode::GlobalAtomicXor64:
    case IR::Opcode::GlobalAtomicExchange64:
+   case IR::Opcode::GlobalAtomicIAdd32x2:
+   case IR::Opcode::GlobalAtomicSMin32x2:
+   case IR::Opcode::GlobalAtomicUMin32x2:
+   case IR::Opcode::GlobalAtomicSMax32x2:
+   case IR::Opcode::GlobalAtomicUMax32x2:
+   case IR::Opcode::GlobalAtomicAnd32x2:
+   case IR::Opcode::GlobalAtomicOr32x2:
+   case IR::Opcode::GlobalAtomicXor32x2:
+   case IR::Opcode::GlobalAtomicExchange32x2:
    case IR::Opcode::GlobalAtomicAddF32:
    case IR::Opcode::GlobalAtomicAddF16x2:
    case IR::Opcode::GlobalAtomicAddF32x2:
@@ -597,6 +606,15 @@ void VisitUsages(Info& info, IR::Inst& inst) {
        break;
    case IR::Opcode::LoadStorage64:
    case IR::Opcode::WriteStorage64:
+   case IR::Opcode::StorageAtomicIAdd32x2:
+   case IR::Opcode::StorageAtomicSMin32x2:
+   case IR::Opcode::StorageAtomicUMin32x2:
+   case IR::Opcode::StorageAtomicSMax32x2:
+   case IR::Opcode::StorageAtomicUMax32x2:
+   case IR::Opcode::StorageAtomicAnd32x2:
+   case IR::Opcode::StorageAtomicOr32x2:
+   case IR::Opcode::StorageAtomicXor32x2:
+   case IR::Opcode::StorageAtomicExchange32x2:
        info.used_storage_buffer_types |= IR::Type::U32x2;
        break;
    case IR::Opcode::LoadStorage128: