2021-02-08 05:54:35 +00:00
|
|
|
// Copyright 2021 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
#include <span>
|
|
|
|
#include <tuple>
|
2021-02-08 05:54:35 +00:00
|
|
|
#include <type_traits>
|
2021-02-17 03:59:28 +00:00
|
|
|
#include <utility>
|
|
|
|
#include <vector>
|
2021-02-08 05:54:35 +00:00
|
|
|
|
|
|
|
#include "shader_recompiler/backend/spirv/emit_spirv.h"
|
|
|
|
#include "shader_recompiler/frontend/ir/basic_block.h"
|
|
|
|
#include "shader_recompiler/frontend/ir/microinstruction.h"
|
|
|
|
#include "shader_recompiler/frontend/ir/program.h"
|
|
|
|
|
|
|
|
namespace Shader::Backend::SPIRV {
|
2021-02-16 07:10:22 +00:00
|
|
|
namespace {
|
|
|
|
// Primary template, intentionally left empty. It is never instantiated with a
// usable member set; only the free-function-pointer specialization below is
// meaningful. (The previous `struct FuncTraits : FuncTraits<Func> {};` made the
// primary template inherit from itself, which is infinitely recursive and
// ill-formed for any type that does not match the specialization.)
template <class Func>
struct FuncTraits {};

/// Extracts the return type, argument count, and argument types from a
/// free-function pointer type such as `Id (*)(EmitContext&, Id, Id)`.
template <class ReturnType_, class... Args>
struct FuncTraits<ReturnType_ (*)(Args...)> {
    using ReturnType = ReturnType_;

    static constexpr size_t NUM_ARGS = sizeof...(Args);

    /// Type of the I-th parameter of the traited function
    template <size_t I>
    using ArgType = std::tuple_element_t<I, std::tuple<Args...>>;
};
|
2021-02-14 04:24:32 +00:00
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
template <auto func, typename... Args>
|
|
|
|
void SetDefinition(EmitContext& ctx, IR::Inst* inst, Args... args) {
|
2021-02-16 07:10:22 +00:00
|
|
|
const Id forward_id{inst->Definition<Id>()};
|
|
|
|
const bool has_forward_id{Sirit::ValidId(forward_id)};
|
|
|
|
Id current_id{};
|
|
|
|
if (has_forward_id) {
|
|
|
|
current_id = ctx.ExchangeCurrentId(forward_id);
|
|
|
|
}
|
2021-02-17 03:59:28 +00:00
|
|
|
const Id new_id{func(ctx, std::forward<Args>(args)...)};
|
2021-02-16 07:10:22 +00:00
|
|
|
if (has_forward_id) {
|
|
|
|
ctx.ExchangeCurrentId(current_id);
|
|
|
|
} else {
|
|
|
|
inst->SetDefinition<Id>(new_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
template <typename ArgType>
|
|
|
|
ArgType Arg(EmitContext& ctx, const IR::Value& arg) {
|
|
|
|
if constexpr (std::is_same_v<ArgType, Id>) {
|
|
|
|
return ctx.Def(arg);
|
|
|
|
} else if constexpr (std::is_same_v<ArgType, const IR::Value&>) {
|
|
|
|
return arg;
|
|
|
|
} else if constexpr (std::is_same_v<ArgType, u32>) {
|
|
|
|
return arg.U32();
|
|
|
|
} else if constexpr (std::is_same_v<ArgType, IR::Block*>) {
|
|
|
|
return arg.Label();
|
2021-03-19 22:28:31 +00:00
|
|
|
} else if constexpr (std::is_same_v<ArgType, IR::Attribute>) {
|
|
|
|
return arg.Attribute();
|
2021-04-02 05:17:47 +01:00
|
|
|
} else if constexpr (std::is_same_v<ArgType, IR::Reg>) {
|
|
|
|
return arg.Reg();
|
2021-02-16 07:10:22 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
template <auto func, bool is_first_arg_inst, size_t... I>
|
|
|
|
void Invoke(EmitContext& ctx, IR::Inst* inst, std::index_sequence<I...>) {
|
|
|
|
using Traits = FuncTraits<decltype(func)>;
|
2021-02-16 07:10:22 +00:00
|
|
|
if constexpr (std::is_same_v<Traits::ReturnType, Id>) {
|
|
|
|
if constexpr (is_first_arg_inst) {
|
2021-02-17 03:59:28 +00:00
|
|
|
SetDefinition<func>(ctx, inst, inst, Arg<Traits::ArgType<I + 2>>(ctx, inst->Arg(I))...);
|
2021-02-16 07:10:22 +00:00
|
|
|
} else {
|
2021-02-17 03:59:28 +00:00
|
|
|
SetDefinition<func>(ctx, inst, Arg<Traits::ArgType<I + 1>>(ctx, inst->Arg(I))...);
|
2021-02-16 07:10:22 +00:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if constexpr (is_first_arg_inst) {
|
2021-02-17 03:59:28 +00:00
|
|
|
func(ctx, inst, Arg<Traits::ArgType<I + 2>>(ctx, inst->Arg(I))...);
|
2021-02-16 07:10:22 +00:00
|
|
|
} else {
|
2021-02-17 03:59:28 +00:00
|
|
|
func(ctx, Arg<Traits::ArgType<I + 1>>(ctx, inst->Arg(I))...);
|
2021-02-08 05:54:35 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
template <auto func>
|
|
|
|
void Invoke(EmitContext& ctx, IR::Inst* inst) {
|
|
|
|
using Traits = FuncTraits<decltype(func)>;
|
2021-02-16 07:10:22 +00:00
|
|
|
static_assert(Traits::NUM_ARGS >= 1, "Insufficient arguments");
|
|
|
|
if constexpr (Traits::NUM_ARGS == 1) {
|
2021-02-17 03:59:28 +00:00
|
|
|
Invoke<func, false>(ctx, inst, std::make_index_sequence<0>{});
|
2021-02-16 07:10:22 +00:00
|
|
|
} else {
|
|
|
|
using FirstArgType = typename Traits::template ArgType<1>;
|
|
|
|
static constexpr bool is_first_arg_inst = std::is_same_v<FirstArgType, IR::Inst*>;
|
|
|
|
using Indices = std::make_index_sequence<Traits::NUM_ARGS - (is_first_arg_inst ? 2 : 1)>;
|
2021-02-17 03:59:28 +00:00
|
|
|
Invoke<func, is_first_arg_inst>(ctx, inst, Indices{});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Translates a single IR instruction by dispatching to its matching Emit*
// function. The case labels are generated by X-macro expansion of the opcode
// list, so every declared opcode gets a `case ...: return Invoke<...>` arm.
void EmitInst(EmitContext& ctx, IR::Inst* inst) {
    switch (inst->Opcode()) {
#define OPCODE(name, result_type, ...)                                                             \
    case IR::Opcode::name:                                                                         \
        return Invoke<&Emit##name>(ctx, inst);
#include "shader_recompiler/frontend/ir/opcodes.inc"
#undef OPCODE
    }
    // Fell through every generated case: the opcode value is not in the list
    throw LogicError("Invalid opcode {}", inst->Opcode());
}
|
|
|
|
|
|
|
|
// Maps an IR type to the SPIR-V type id used when emitting phi nodes.
// Only the types that can flow through phis are supported here.
Id TypeId(const EmitContext& ctx, IR::Type type) {
    if (type == IR::Type::U1) {
        return ctx.U1;
    }
    if (type == IR::Type::U32) {
        return ctx.U32[1];
    }
    throw NotImplementedException("Phi node type {}", type);
}
|
2021-02-20 06:30:13 +00:00
|
|
|
|
2021-03-20 22:11:56 +00:00
|
|
|
// Emits the module's single void() function: one label per IR block followed
// by the translation of every instruction in that block. Returns the id of
// the emitted function.
Id DefineMain(EmitContext& ctx, IR::Program& program) {
    const Id function_type{ctx.TypeFunction(ctx.void_id)};
    const Id main_func{
        ctx.OpFunction(ctx.void_id, spv::FunctionControlMask::MaskNone, function_type)};
    for (IR::Block* const block : program.blocks) {
        // Each block's label id was stored as its definition beforehand
        ctx.AddLabel(block->Definition<Id>());
        for (IR::Inst& inst : block->Instructions()) {
            EmitInst(ctx, &inst);
        }
    }
    ctx.OpFunctionEnd();
    return main_func;
}
|
|
|
|
|
2021-03-27 06:08:31 +00:00
|
|
|
// Declares the module's OpEntryPoint for the program's stage and adds the
// stage-dependent execution modes (compute workgroup size, fragment origin,
// depth replacing). `main` is the id returned by DefineMain.
void DefineEntryPoint(const IR::Program& program, EmitContext& ctx, Id main) {
    // Interface ids (inputs/outputs) collected by the context during emission
    const std::span interfaces(ctx.interfaces.data(), ctx.interfaces.size());
    spv::ExecutionModel execution_model{};
    switch (program.stage) {
    case Shader::Stage::Compute: {
        const std::array<u32, 3> workgroup_size{program.workgroup_size};
        execution_model = spv::ExecutionModel::GLCompute;
        ctx.AddExecutionMode(main, spv::ExecutionMode::LocalSize, workgroup_size[0],
                             workgroup_size[1], workgroup_size[2]);
        break;
    }
    case Shader::Stage::VertexB:
        execution_model = spv::ExecutionModel::Vertex;
        break;
    case Shader::Stage::Fragment:
        execution_model = spv::ExecutionModel::Fragment;
        ctx.AddExecutionMode(main, spv::ExecutionMode::OriginUpperLeft);
        if (program.info.stores_frag_depth) {
            // The shader writes gl_FragDepth, so declare DepthReplacing
            ctx.AddExecutionMode(main, spv::ExecutionMode::DepthReplacing);
        }
        break;
    default:
        throw NotImplementedException("Stage {}", program.stage);
    }
    ctx.AddEntryPoint(execution_model, main, "main", interfaces);
}
|
|
|
|
|
2021-02-20 06:30:13 +00:00
|
|
|
// Adds the SPV_KHR_float_controls denormal capabilities and execution modes
// (DenormFlushToZero / DenormPreserve, per bit width) that match the shader's
// observed fp32/fp16 denormal usage and the host's advertised support.
// Callers only reach this when profile.support_float_controls is true.
void SetupDenormControl(const Profile& profile, const IR::Program& program, EmitContext& ctx,
                        Id main_func) {
    const Info& info{program.info};
    // Fp32 denormal mode: flush and preserve are mutually exclusive requests
    if (info.uses_fp32_denorms_flush && info.uses_fp32_denorms_preserve) {
        // LOG_ERROR(HW_GPU, "Fp32 denorm flush and preserve on the same shader");
    } else if (info.uses_fp32_denorms_flush) {
        if (profile.support_fp32_denorm_flush) {
            ctx.AddCapability(spv::Capability::DenormFlushToZero);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 32U);
        } else {
            // Drivers will most likely flush denorms by default, no need to warn
        }
    } else if (info.uses_fp32_denorms_preserve) {
        if (profile.support_fp32_denorm_preserve) {
            ctx.AddCapability(spv::Capability::DenormPreserve);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 32U);
        } else {
            // LOG_WARNING(HW_GPU, "Fp32 denorm preserve used in shader without host support");
        }
    }
    if (!profile.support_separate_denorm_behavior) {
        // No separate denorm behavior: the host cannot set a different mode for
        // fp16 than the one chosen for fp32 above, so stop here
        return;
    }
    // Fp16 denormal mode, same structure as the fp32 handling above
    if (info.uses_fp16_denorms_flush && info.uses_fp16_denorms_preserve) {
        // LOG_ERROR(HW_GPU, "Fp16 denorm flush and preserve on the same shader");
    } else if (info.uses_fp16_denorms_flush) {
        if (profile.support_fp16_denorm_flush) {
            ctx.AddCapability(spv::Capability::DenormFlushToZero);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormFlushToZero, 16U);
        } else {
            // Same as fp32, no need to warn as most drivers will flush by default
        }
    } else if (info.uses_fp16_denorms_preserve) {
        if (profile.support_fp16_denorm_preserve) {
            ctx.AddCapability(spv::Capability::DenormPreserve);
            ctx.AddExecutionMode(main_func, spv::ExecutionMode::DenormPreserve, 16U);
        } else {
            // LOG_WARNING(HW_GPU, "Fp16 denorm preserve used in shader without host support");
        }
    }
}
|
2021-02-24 21:37:47 +00:00
|
|
|
|
2021-03-21 23:28:37 +00:00
|
|
|
// Adds SignedZeroInfNanPreserve for each floating-point width the shader
// needs, gated on the host's per-width float-controls support.
// Callers only reach this when profile.support_float_controls is true.
void SetupSignedNanCapabilities(const Profile& profile, const IR::Program& program,
                                EmitContext& ctx, Id main_func) {
    const auto preserve_for_width{[&](u32 width) {
        ctx.AddCapability(spv::Capability::SignedZeroInfNanPreserve);
        ctx.AddExecutionMode(main_func, spv::ExecutionMode::SignedZeroInfNanPreserve, width);
    }};
    if (program.info.uses_fp16 && profile.support_fp16_signed_zero_nan_preserve) {
        preserve_for_width(16U);
    }
    if (profile.support_fp32_signed_zero_nan_preserve) {
        // Fp32 is requested unconditionally (no usage flag is consulted)
        preserve_for_width(32U);
    }
    if (program.info.uses_fp64 && profile.support_fp64_signed_zero_nan_preserve) {
        preserve_for_width(64U);
    }
}
|
|
|
|
|
2021-03-20 22:11:56 +00:00
|
|
|
// Declares the SPIR-V capabilities and extensions required by the features
// this shader actually uses, as recorded in `info`, constrained by what the
// host `profile` supports.
void SetupCapabilities(const Profile& profile, const Info& info, EmitContext& ctx) {
    if (info.uses_sampled_1d) {
        ctx.AddCapability(spv::Capability::Sampled1D);
    }
    if (info.uses_sparse_residency) {
        ctx.AddCapability(spv::Capability::SparseResidency);
    }
    if (info.uses_demote_to_helper_invocation) {
        ctx.AddExtension("SPV_EXT_demote_to_helper_invocation");
        ctx.AddCapability(spv::Capability::DemoteToHelperInvocationEXT);
    }
    if (info.stores_viewport_index) {
        ctx.AddCapability(spv::Capability::MultiViewport);
        // Geometry shaders can export the viewport index without the
        // extension, so only non-geometry stages need it
        if (profile.support_viewport_index_layer_non_geometry &&
            ctx.stage != Shader::Stage::Geometry) {
            ctx.AddExtension("SPV_EXT_shader_viewport_index_layer");
            ctx.AddCapability(spv::Capability::ShaderViewportIndexLayerEXT);
        }
    }
    // Without native gl_VertexID/gl_InstanceID support, fall back to the
    // draw-parameters built-ins
    if (!profile.support_vertex_instance_id && (info.loads_instance_id || info.loads_vertex_id)) {
        ctx.AddExtension("SPV_KHR_shader_draw_parameters");
        ctx.AddCapability(spv::Capability::DrawParameters);
    }
    if ((info.uses_subgroup_vote || info.uses_subgroup_invocation_id) && profile.support_vote) {
        ctx.AddExtension("SPV_KHR_shader_ballot");
        ctx.AddCapability(spv::Capability::SubgroupBallotKHR);
        if (!profile.warp_size_potentially_larger_than_guest) {
            // vote ops are only used when not taking the long path
            ctx.AddExtension("SPV_KHR_subgroup_vote");
            ctx.AddCapability(spv::Capability::SubgroupVoteKHR);
        }
    }
    // TODO: Track this usage
    ctx.AddCapability(spv::Capability::ImageGatherExtended);
    ctx.AddCapability(spv::Capability::ImageQuery);
}
|
|
|
|
|
2021-02-24 21:37:47 +00:00
|
|
|
/// Resolves the SPIR-V id for one phi argument, handing out a forward
/// declaration when the argument's instruction has not been emitted yet.
Id PhiArgDef(EmitContext& ctx, IR::Inst* inst, size_t index) {
    // Phi nodes can have forward declarations, if an argument is not defined provide a forward
    // declaration of it. Invoke will take care of giving it the right definition when it's
    // actually defined.
    const IR::Value arg{inst->Arg(index)};
    if (arg.IsImmediate()) {
        // Let the context handle immediate definitions, as it already knows how
        return ctx.Def(arg);
    }
    IR::Inst* const arg_inst{arg.InstRecursive()};
    if (const Id def{arg_inst->Definition<Id>()}; Sirit::ValidId(def)) {
        // Return the current definition if it exists
        return def;
    }
    if (arg_inst == inst) {
        // This is a self referencing phi node
        // Self-referencing definition will be set by the caller, so just grab the current id
        return ctx.CurrentId();
    }
    // If it hasn't been defined and it's not a self reference, get a forward declaration
    // and attach it to the argument's instruction so SetDefinition reuses it later
    const Id def{ctx.ForwardDeclarationId()};
    arg_inst->SetDefinition<Id>(def);
    return def;
}
|
2021-02-16 07:10:22 +00:00
|
|
|
} // Anonymous namespace
|
2021-02-08 05:54:35 +00:00
|
|
|
|
2021-03-27 06:08:31 +00:00
|
|
|
std::vector<u32> EmitSPIRV(const Profile& profile, IR::Program& program, u32& binding) {
|
2021-03-19 22:28:31 +00:00
|
|
|
EmitContext ctx{profile, program, binding};
|
2021-03-20 22:11:56 +00:00
|
|
|
const Id main{DefineMain(ctx, program)};
|
2021-03-27 06:08:31 +00:00
|
|
|
DefineEntryPoint(program, ctx, main);
|
2021-03-21 23:28:37 +00:00
|
|
|
if (profile.support_float_controls) {
|
|
|
|
ctx.AddExtension("SPV_KHR_float_controls");
|
|
|
|
SetupDenormControl(profile, program, ctx, main);
|
|
|
|
SetupSignedNanCapabilities(profile, program, ctx, main);
|
|
|
|
}
|
2021-03-20 22:11:56 +00:00
|
|
|
SetupCapabilities(profile, program.info, ctx);
|
2021-02-17 03:59:28 +00:00
|
|
|
return ctx.Assemble();
|
2021-02-11 19:39:06 +00:00
|
|
|
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Emits an OpPhi whose operands are (value id, predecessor block label) pairs,
// one pair per phi argument.
Id EmitPhi(EmitContext& ctx, IR::Inst* inst) {
    const size_t argument_count{inst->NumArgs()};
    boost::container::small_vector<Id, 32> phi_operands;
    // Two ids per argument: the value followed by its source block label
    phi_operands.reserve(argument_count * 2);
    for (size_t arg_index = 0; arg_index < argument_count; ++arg_index) {
        phi_operands.push_back(PhiArgDef(ctx, inst, arg_index));
        phi_operands.push_back(inst->PhiBlock(arg_index)->Definition<Id>());
    }
    // The type of a phi instruction is stored in its flags
    const Id result_type{TypeId(ctx, inst->Flags<IR::Type>())};
    return ctx.OpPhi(result_type, std::span(phi_operands.data(), phi_operands.size()));
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Void instructions produce no SPIR-V code; intentionally a no-op.
void EmitVoid(EmitContext&) {}
|
2021-02-08 05:54:35 +00:00
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Resolves an identity to the id of the value it aliases. When that value has
// no id yet, a forward declaration is handed out and recorded on the aliased
// instruction so the eventual definition reuses it.
Id EmitIdentity(EmitContext& ctx, const IR::Value& value) {
    const Id existing_id = ctx.Def(value);
    if (Sirit::ValidId(existing_id)) {
        return existing_id;
    }
    // Not defined yet: forward-declare and remember the id on the instruction
    const Id forward_id{ctx.ForwardDeclarationId()};
    value.InstRecursive()->SetDefinition<Id>(forward_id);
    return forward_id;
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Zero-flag pseudo-op; must never be dispatched to the backend, so reaching
// this emitter is a logic error.
void EmitGetZeroFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Sign-flag pseudo-op; must never be dispatched to the backend, so reaching
// this emitter is a logic error.
void EmitGetSignFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Carry-flag pseudo-op; must never be dispatched to the backend, so reaching
// this emitter is a logic error.
void EmitGetCarryFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-02-17 03:59:28 +00:00
|
|
|
// Overflow-flag pseudo-op; must never be dispatched to the backend, so
// reaching this emitter is a logic error.
void EmitGetOverflowFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-03-08 21:31:53 +00:00
|
|
|
// Sparse-residency pseudo-op; must never be dispatched to the backend, so
// reaching this emitter is a logic error.
void EmitGetSparseFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-03-25 15:31:37 +00:00
|
|
|
// In-bounds pseudo-op; must never be dispatched to the backend, so reaching
// this emitter is a logic error.
void EmitGetInBoundsFromOp(EmitContext&) {
    throw LogicError("Unreachable instruction");
}
|
|
|
|
|
2021-02-08 05:54:35 +00:00
|
|
|
} // namespace Shader::Backend::SPIRV
|