
Merge pull request #8702 from liamwhite/format-swap

renderer_vulkan: add format fallbacks for R16G16B16_SFLOAT, R16G16B16_SSCALED, R8G8B8_SSCALED
bunnei 2022-08-04 09:51:56 -07:00 committed by GitHub
commit 1f7e62e86e
5 changed files with 348 additions and 284 deletions
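
The substance of the change: Vulkan implementations are not required to support three-component vertex formats such as VK_FORMAT_R16G16B16_SFLOAT for vertex buffers, so the renderer now asks the device for a supported substitute and falls back to the four-component equivalent where needed. A minimal standalone sketch of that idea, independent of yuzu's Device wrapper (PickVertexFormat and its parameters are illustrative names, not part of this change):

#include <vulkan/vulkan.h>

// Returns 'wanted' if the driver can consume it as a vertex attribute,
// otherwise returns the caller-provided wider fallback.
VkFormat PickVertexFormat(VkPhysicalDevice physical, VkFormat wanted, VkFormat fallback) {
    VkFormatProperties props{};
    vkGetPhysicalDeviceFormatProperties(physical, wanted, &props);
    if (props.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) {
        return wanted;
    }
    return fallback;
}

// e.g. PickVertexFormat(physical, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16B16A16_SFLOAT)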


@@ -317,7 +317,9 @@ VkPrimitiveTopology PrimitiveTopology([[maybe_unused]] const Device& device,
}
}
VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttribute::Size size) {
VkFormat VertexFormat(const Device& device, Maxwell::VertexAttribute::Type type,
Maxwell::VertexAttribute::Size size) {
const VkFormat format{([&]() {
switch (type) {
case Maxwell::VertexAttribute::Type::UnsignedNorm:
switch (size) {
@@ -504,8 +506,15 @@ VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttrib
}
break;
}
return VK_FORMAT_UNDEFINED;
})()};
if (format == VK_FORMAT_UNDEFINED) {
UNIMPLEMENTED_MSG("Unimplemented vertex format of type={} and size={}", type, size);
return {};
}
return device.GetSupportedFormat(format, VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
FormatType::Buffer);
}
VkCompareOp ComparisonOp(Maxwell::ComparisonOp comparison) {
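
With the reworked signature, callers hand in the Device so the direct Maxwell-to-Vulkan mapping can be swapped for a driver-supported substitute before it is written into a VkVertexInputAttributeDescription. A rough caller-side sketch, mirroring the call sites in the pipeline and rasterizer hunks further down (the 'attribute' object is assumed here, not part of this hunk):

// 'attribute' describes a Maxwell vertex attribute whose direct mapping is,
// say, VK_FORMAT_R16G16B16_SFLOAT. On a device without vertex-buffer support
// for that format, GetSupportedFormat returns the VK_FORMAT_R16G16B16A16_SFLOAT
// alternative added in this PR's Alternatives table, so the pipeline never
// sees an unsupported format.
const VkFormat format =
    MaxwellToVK::VertexFormat(device, attribute.Type(), attribute.Size());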


@@ -48,7 +48,8 @@ VkShaderStageFlagBits ShaderStage(Shader::Stage stage);
VkPrimitiveTopology PrimitiveTopology(const Device& device, Maxwell::PrimitiveTopology topology);
VkFormat VertexFormat(Maxwell::VertexAttribute::Type type, Maxwell::VertexAttribute::Size size);
VkFormat VertexFormat(const Device& device, Maxwell::VertexAttribute::Type type,
Maxwell::VertexAttribute::Size size);
VkCompareOp ComparisonOp(Maxwell::ComparisonOp comparison);


@@ -559,7 +559,7 @@ void GraphicsPipeline::MakePipeline(VkRenderPass render_pass) {
vertex_attributes.push_back({
.location = static_cast<u32>(index),
.binding = attribute.buffer,
.format = MaxwellToVK::VertexFormat(attribute.Type(), attribute.Size()),
.format = MaxwellToVK::VertexFormat(device, attribute.Type(), attribute.Size()),
.offset = attribute.offset,
});
}


@@ -939,7 +939,7 @@ void RasterizerVulkan::UpdateVertexInput(Tegra::Engines::Maxwell3D::Regs& regs)
.pNext = nullptr,
.location = static_cast<u32>(index),
.binding = binding,
.format = MaxwellToVK::VertexFormat(attribute.type, attribute.size),
.format = MaxwellToVK::VertexFormat(device, attribute.type, attribute.size),
.offset = attribute.offset,
});
}


@@ -50,6 +50,21 @@ constexpr std::array R4G4_UNORM_PACK8{
VK_FORMAT_UNDEFINED,
};
constexpr std::array R16G16B16_SFLOAT{
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_UNDEFINED,
};
constexpr std::array R16G16B16_SSCALED{
VK_FORMAT_R16G16B16A16_SSCALED,
VK_FORMAT_UNDEFINED,
};
constexpr std::array R8G8B8_SSCALED{
VK_FORMAT_R8G8B8A8_SSCALED,
VK_FORMAT_UNDEFINED,
};
} // namespace Alternatives
enum class NvidiaArchitecture {
@@ -102,6 +117,12 @@ constexpr const VkFormat* GetFormatAlternatives(VkFormat format) {
return Alternatives::B5G6R5_UNORM_PACK16.data();
case VK_FORMAT_R4G4_UNORM_PACK8:
return Alternatives::R4G4_UNORM_PACK8.data();
case VK_FORMAT_R16G16B16_SFLOAT:
return Alternatives::R16G16B16_SFLOAT.data();
case VK_FORMAT_R16G16B16_SSCALED:
return Alternatives::R16G16B16_SSCALED.data();
case VK_FORMAT_R8G8B8_SSCALED:
return Alternatives::R8G8B8_SSCALED.data();
default:
return nullptr;
}
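
Each Alternatives array is terminated by VK_FORMAT_UNDEFINED, so the pointer returned by GetFormatAlternatives can be walked until a supported entry or the sentinel is reached, which is what Device::GetSupportedFormat does in the final hunk below. A minimal sketch of that consumption pattern, with a hypothetical is_supported predicate standing in for the device's own support check:

#include <vulkan/vulkan.h>

// Walks a VK_FORMAT_UNDEFINED-terminated list and returns the first entry the
// predicate accepts, or VK_FORMAT_UNDEFINED when the list is null or exhausted.
template <typename Predicate>
VkFormat FirstSupportedAlternative(const VkFormat* alternatives, Predicate&& is_supported) {
    if (alternatives == nullptr) {
        return VK_FORMAT_UNDEFINED;
    }
    for (; *alternatives != VK_FORMAT_UNDEFINED; ++alternatives) {
        if (is_supported(*alternatives)) {
            return *alternatives;
        }
    }
    return VK_FORMAT_UNDEFINED;
}
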
@@ -122,109 +143,142 @@ VkFormatFeatureFlags GetFormatFeatures(VkFormatProperties properties, FormatType
std::unordered_map<VkFormat, VkFormatProperties> GetFormatProperties(vk::PhysicalDevice physical) {
static constexpr std::array formats{
VK_FORMAT_A8B8G8R8_UNORM_PACK32,
VK_FORMAT_A8B8G8R8_UINT_PACK32,
VK_FORMAT_A8B8G8R8_SNORM_PACK32,
VK_FORMAT_A8B8G8R8_SINT_PACK32,
VK_FORMAT_A8B8G8R8_SRGB_PACK32,
VK_FORMAT_R5G6B5_UNORM_PACK16,
VK_FORMAT_B5G6R5_UNORM_PACK16,
VK_FORMAT_R5G5B5A1_UNORM_PACK16,
VK_FORMAT_B5G5R5A1_UNORM_PACK16,
VK_FORMAT_A2B10G10R10_UNORM_PACK32,
VK_FORMAT_A2B10G10R10_UINT_PACK32,
VK_FORMAT_A1R5G5B5_UNORM_PACK16,
VK_FORMAT_A2B10G10R10_SINT_PACK32,
VK_FORMAT_A2B10G10R10_SNORM_PACK32,
VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
VK_FORMAT_A2B10G10R10_UINT_PACK32,
VK_FORMAT_A2B10G10R10_UNORM_PACK32,
VK_FORMAT_A2B10G10R10_USCALED_PACK32,
VK_FORMAT_A8B8G8R8_SINT_PACK32,
VK_FORMAT_A8B8G8R8_SNORM_PACK32,
VK_FORMAT_A8B8G8R8_SRGB_PACK32,
VK_FORMAT_A8B8G8R8_UINT_PACK32,
VK_FORMAT_A8B8G8R8_UNORM_PACK32,
VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
VK_FORMAT_B10G11R11_UFLOAT_PACK32,
VK_FORMAT_B4G4R4A4_UNORM_PACK16,
VK_FORMAT_B5G5R5A1_UNORM_PACK16,
VK_FORMAT_B5G6R5_UNORM_PACK16,
VK_FORMAT_B8G8R8A8_SRGB,
VK_FORMAT_B8G8R8A8_UNORM,
VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
VK_FORMAT_BC2_SRGB_BLOCK,
VK_FORMAT_BC2_UNORM_BLOCK,
VK_FORMAT_BC3_SRGB_BLOCK,
VK_FORMAT_BC3_UNORM_BLOCK,
VK_FORMAT_BC4_SNORM_BLOCK,
VK_FORMAT_BC4_UNORM_BLOCK,
VK_FORMAT_BC5_SNORM_BLOCK,
VK_FORMAT_BC5_UNORM_BLOCK,
VK_FORMAT_BC6H_SFLOAT_BLOCK,
VK_FORMAT_BC6H_UFLOAT_BLOCK,
VK_FORMAT_BC7_SRGB_BLOCK,
VK_FORMAT_BC7_UNORM_BLOCK,
VK_FORMAT_D16_UNORM,
VK_FORMAT_D16_UNORM_S8_UINT,
VK_FORMAT_D24_UNORM_S8_UINT,
VK_FORMAT_D32_SFLOAT,
VK_FORMAT_D32_SFLOAT_S8_UINT,
VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_R16G16B16A16_SINT,
VK_FORMAT_R16G16B16A16_SNORM,
VK_FORMAT_R16G16B16A16_SSCALED,
VK_FORMAT_R16G16B16A16_UINT,
VK_FORMAT_R16G16B16A16_UNORM,
VK_FORMAT_R16G16B16A16_USCALED,
VK_FORMAT_R16G16B16_SFLOAT,
VK_FORMAT_R16G16B16_SINT,
VK_FORMAT_R16G16B16_SNORM,
VK_FORMAT_R16G16B16_SSCALED,
VK_FORMAT_R16G16B16_UINT,
VK_FORMAT_R16G16B16_UNORM,
VK_FORMAT_R16G16B16_USCALED,
VK_FORMAT_R16G16_SFLOAT,
VK_FORMAT_R16G16_SINT,
VK_FORMAT_R16G16_SNORM,
VK_FORMAT_R16G16_SSCALED,
VK_FORMAT_R16G16_UINT,
VK_FORMAT_R16G16_UNORM,
VK_FORMAT_R16G16_USCALED,
VK_FORMAT_R16_SFLOAT,
VK_FORMAT_R16_SINT,
VK_FORMAT_R16_SNORM,
VK_FORMAT_R16_SSCALED,
VK_FORMAT_R16_UINT,
VK_FORMAT_R16_UNORM,
VK_FORMAT_R16_USCALED,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R32G32B32A32_SINT,
VK_FORMAT_R32G32B32A32_UINT,
VK_FORMAT_R32G32B32_SFLOAT,
VK_FORMAT_R32G32B32_SINT,
VK_FORMAT_R32G32B32_UINT,
VK_FORMAT_R32G32_SFLOAT,
VK_FORMAT_R32G32_SINT,
VK_FORMAT_R32G32_UINT,
VK_FORMAT_R16G16B16A16_SINT,
VK_FORMAT_R16G16B16A16_UINT,
VK_FORMAT_R16G16B16A16_SNORM,
VK_FORMAT_R16G16B16A16_UNORM,
VK_FORMAT_R16G16_UNORM,
VK_FORMAT_R16G16_SNORM,
VK_FORMAT_R16G16_SFLOAT,
VK_FORMAT_R16G16_UINT,
VK_FORMAT_R16G16_SINT,
VK_FORMAT_R16_UNORM,
VK_FORMAT_R16_SNORM,
VK_FORMAT_R16_UINT,
VK_FORMAT_R8G8B8A8_SRGB,
VK_FORMAT_R8G8_UNORM,
VK_FORMAT_R8G8_SNORM,
VK_FORMAT_R8G8_SINT,
VK_FORMAT_R8G8_UINT,
VK_FORMAT_R8_UNORM,
VK_FORMAT_R8_SNORM,
VK_FORMAT_R8_SINT,
VK_FORMAT_R8_UINT,
VK_FORMAT_B10G11R11_UFLOAT_PACK32,
VK_FORMAT_R32_SFLOAT,
VK_FORMAT_R32_UINT,
VK_FORMAT_R32_SINT,
VK_FORMAT_R16_SFLOAT,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_B8G8R8A8_UNORM,
VK_FORMAT_B8G8R8A8_SRGB,
VK_FORMAT_R4G4_UNORM_PACK8,
VK_FORMAT_R32_UINT,
VK_FORMAT_R4G4B4A4_UNORM_PACK16,
VK_FORMAT_B4G4R4A4_UNORM_PACK16,
VK_FORMAT_D32_SFLOAT,
VK_FORMAT_D16_UNORM,
VK_FORMAT_R4G4_UNORM_PACK8,
VK_FORMAT_R5G5B5A1_UNORM_PACK16,
VK_FORMAT_R5G6B5_UNORM_PACK16,
VK_FORMAT_R8G8B8A8_SINT,
VK_FORMAT_R8G8B8A8_SNORM,
VK_FORMAT_R8G8B8A8_SRGB,
VK_FORMAT_R8G8B8A8_SSCALED,
VK_FORMAT_R8G8B8A8_UINT,
VK_FORMAT_R8G8B8A8_UNORM,
VK_FORMAT_R8G8B8A8_USCALED,
VK_FORMAT_R8G8B8_SINT,
VK_FORMAT_R8G8B8_SNORM,
VK_FORMAT_R8G8B8_SSCALED,
VK_FORMAT_R8G8B8_UINT,
VK_FORMAT_R8G8B8_UNORM,
VK_FORMAT_R8G8B8_USCALED,
VK_FORMAT_R8G8_SINT,
VK_FORMAT_R8G8_SNORM,
VK_FORMAT_R8G8_SSCALED,
VK_FORMAT_R8G8_UINT,
VK_FORMAT_R8G8_UNORM,
VK_FORMAT_R8G8_USCALED,
VK_FORMAT_R8_SINT,
VK_FORMAT_R8_SNORM,
VK_FORMAT_R8_SSCALED,
VK_FORMAT_R8_UINT,
VK_FORMAT_R8_UNORM,
VK_FORMAT_R8_USCALED,
VK_FORMAT_S8_UINT,
VK_FORMAT_D16_UNORM_S8_UINT,
VK_FORMAT_D24_UNORM_S8_UINT,
VK_FORMAT_D32_SFLOAT_S8_UINT,
VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
VK_FORMAT_BC2_UNORM_BLOCK,
VK_FORMAT_BC3_UNORM_BLOCK,
VK_FORMAT_BC4_UNORM_BLOCK,
VK_FORMAT_BC4_SNORM_BLOCK,
VK_FORMAT_BC5_UNORM_BLOCK,
VK_FORMAT_BC5_SNORM_BLOCK,
VK_FORMAT_BC7_UNORM_BLOCK,
VK_FORMAT_BC6H_UFLOAT_BLOCK,
VK_FORMAT_BC6H_SFLOAT_BLOCK,
VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
VK_FORMAT_BC2_SRGB_BLOCK,
VK_FORMAT_BC3_SRGB_BLOCK,
VK_FORMAT_BC7_SRGB_BLOCK,
VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
};
std::unordered_map<VkFormat, VkFormatProperties> format_properties;
for (const auto format : formats) {
@@ -739,7 +793,7 @@ VkFormat Device::GetSupportedFormat(VkFormat wanted_format, VkFormatFeatureFlags
if (!IsFormatSupported(alternative, wanted_usage, format_type)) {
continue;
}
LOG_WARNING(Render_Vulkan,
LOG_DEBUG(Render_Vulkan,
"Emulating format={} with alternative format={} with usage={} and type={}",
wanted_format, alternative, wanted_usage, format_type);
return alternative;