diff --git a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
index a7a878f18c..408a7b83e2 100644
--- a/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
+++ b/src/video_core/renderer_vulkan/maxwell_to_vk.cpp
@@ -112,147 +112,142 @@ VkCompareOp DepthCompareFunction(Tegra::Texture::DepthCompareFunc depth_compare_
 
 } // namespace Sampler
 
-namespace {
-constexpr u32 Attachable = 1 << 0;
-constexpr u32 Storage = 1 << 1;
-
 struct FormatTuple {
-    VkFormat format; ///< Vulkan format
-    int usage = 0; ///< Describes image format usage
-} constexpr tex_format_tuples[] = {
-    {VK_FORMAT_A8B8G8R8_UNORM_PACK32, Attachable | Storage}, // A8B8G8R8_UNORM
-    {VK_FORMAT_A8B8G8R8_SNORM_PACK32, Attachable | Storage}, // A8B8G8R8_SNORM
-    {VK_FORMAT_A8B8G8R8_SINT_PACK32, Attachable | Storage}, // A8B8G8R8_SINT
-    {VK_FORMAT_A8B8G8R8_UINT_PACK32, Attachable | Storage}, // A8B8G8R8_UINT
-    {VK_FORMAT_R5G6B5_UNORM_PACK16, Attachable}, // R5G6B5_UNORM
-    {VK_FORMAT_B5G6R5_UNORM_PACK16}, // B5G6R5_UNORM
-    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1R5G5B5_UNORM
-    {VK_FORMAT_A2B10G10R10_UNORM_PACK32, Attachable | Storage}, // A2B10G10R10_UNORM
-    {VK_FORMAT_A2B10G10R10_UINT_PACK32, Attachable | Storage}, // A2B10G10R10_UINT
-    {VK_FORMAT_A2R10G10B10_UNORM_PACK32, Attachable}, // A2R10G10B10_UNORM
-    {VK_FORMAT_A1R5G5B5_UNORM_PACK16, Attachable}, // A1B5G5R5_UNORM (flipped with swizzle)
-    {VK_FORMAT_R5G5B5A1_UNORM_PACK16}, // A5B5G5R1_UNORM (specially swizzled)
-    {VK_FORMAT_R8_UNORM, Attachable | Storage}, // R8_UNORM
-    {VK_FORMAT_R8_SNORM, Attachable | Storage}, // R8_SNORM
-    {VK_FORMAT_R8_SINT, Attachable | Storage}, // R8_SINT
-    {VK_FORMAT_R8_UINT, Attachable | Storage}, // R8_UINT
-    {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // R16G16B16A16_FLOAT
-    {VK_FORMAT_R16G16B16A16_UNORM, Attachable | Storage}, // R16G16B16A16_UNORM
-    {VK_FORMAT_R16G16B16A16_SNORM, Attachable | Storage}, // R16G16B16A16_SNORM
-    {VK_FORMAT_R16G16B16A16_SINT, Attachable | Storage}, // R16G16B16A16_SINT
-    {VK_FORMAT_R16G16B16A16_UINT, Attachable | Storage}, // R16G16B16A16_UINT
-    {VK_FORMAT_B10G11R11_UFLOAT_PACK32, Attachable | Storage}, // B10G11R11_FLOAT
-    {VK_FORMAT_R32G32B32A32_UINT, Attachable | Storage}, // R32G32B32A32_UINT
-    {VK_FORMAT_BC1_RGBA_UNORM_BLOCK}, // BC1_RGBA_UNORM
-    {VK_FORMAT_BC2_UNORM_BLOCK}, // BC2_UNORM
-    {VK_FORMAT_BC3_UNORM_BLOCK}, // BC3_UNORM
-    {VK_FORMAT_BC4_UNORM_BLOCK}, // BC4_UNORM
-    {VK_FORMAT_BC4_SNORM_BLOCK}, // BC4_SNORM
-    {VK_FORMAT_BC5_UNORM_BLOCK}, // BC5_UNORM
-    {VK_FORMAT_BC5_SNORM_BLOCK}, // BC5_SNORM
-    {VK_FORMAT_BC7_UNORM_BLOCK}, // BC7_UNORM
-    {VK_FORMAT_BC6H_UFLOAT_BLOCK}, // BC6H_UFLOAT
-    {VK_FORMAT_BC6H_SFLOAT_BLOCK}, // BC6H_SFLOAT
-    {VK_FORMAT_ASTC_4x4_UNORM_BLOCK}, // ASTC_2D_4X4_UNORM
-    {VK_FORMAT_B8G8R8A8_UNORM, Attachable | Storage}, // B8G8R8A8_UNORM
-    {VK_FORMAT_R32G32B32A32_SFLOAT, Attachable | Storage}, // R32G32B32A32_FLOAT
-    {VK_FORMAT_R32G32B32A32_SINT, Attachable | Storage}, // R32G32B32A32_SINT
-    {VK_FORMAT_R32G32_SFLOAT, Attachable | Storage}, // R32G32_FLOAT
-    {VK_FORMAT_R32G32_SINT, Attachable | Storage}, // R32G32_SINT
-    {VK_FORMAT_R32_SFLOAT, Attachable | Storage}, // R32_FLOAT
-    {VK_FORMAT_R16_SFLOAT, Attachable | Storage}, // R16_FLOAT
-    {VK_FORMAT_R16_UNORM, Attachable | Storage}, // R16_UNORM
-    {VK_FORMAT_R16_SNORM, Attachable | Storage}, // R16_SNORM
-    {VK_FORMAT_R16_UINT, Attachable | Storage}, // R16_UINT
-    {VK_FORMAT_R16_SINT, Attachable | Storage}, // R16_SINT
-    {VK_FORMAT_R16G16_UNORM, Attachable | Storage}, // R16G16_UNORM
-    {VK_FORMAT_R16G16_SFLOAT, Attachable | Storage}, // R16G16_FLOAT
-    {VK_FORMAT_R16G16_UINT, Attachable | Storage}, // R16G16_UINT
-    {VK_FORMAT_R16G16_SINT, Attachable | Storage}, // R16G16_SINT
-    {VK_FORMAT_R16G16_SNORM, Attachable | Storage}, // R16G16_SNORM
-    {VK_FORMAT_R32G32B32_SFLOAT}, // R32G32B32_FLOAT
-    {VK_FORMAT_A8B8G8R8_SRGB_PACK32, Attachable}, // A8B8G8R8_SRGB
-    {VK_FORMAT_R8G8_UNORM, Attachable | Storage}, // R8G8_UNORM
-    {VK_FORMAT_R8G8_SNORM, Attachable | Storage}, // R8G8_SNORM
-    {VK_FORMAT_R8G8_SINT, Attachable | Storage}, // R8G8_SINT
-    {VK_FORMAT_R8G8_UINT, Attachable | Storage}, // R8G8_UINT
-    {VK_FORMAT_R32G32_UINT, Attachable | Storage}, // R32G32_UINT
-    {VK_FORMAT_R16G16B16A16_SFLOAT, Attachable | Storage}, // R16G16B16X16_FLOAT
-    {VK_FORMAT_R32_UINT, Attachable | Storage}, // R32_UINT
-    {VK_FORMAT_R32_SINT, Attachable | Storage}, // R32_SINT
-    {VK_FORMAT_ASTC_8x8_UNORM_BLOCK}, // ASTC_2D_8X8_UNORM
-    {VK_FORMAT_ASTC_8x5_UNORM_BLOCK}, // ASTC_2D_8X5_UNORM
-    {VK_FORMAT_ASTC_5x4_UNORM_BLOCK}, // ASTC_2D_5X4_UNORM
-    {VK_FORMAT_B8G8R8A8_SRGB, Attachable}, // B8G8R8A8_SRGB
-    {VK_FORMAT_BC1_RGBA_SRGB_BLOCK}, // BC1_RGBA_SRGB
-    {VK_FORMAT_BC2_SRGB_BLOCK}, // BC2_SRGB
-    {VK_FORMAT_BC3_SRGB_BLOCK}, // BC3_SRGB
-    {VK_FORMAT_BC7_SRGB_BLOCK}, // BC7_SRGB
-    {VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT}, // A4B4G4R4_UNORM
-    {VK_FORMAT_R4G4_UNORM_PACK8}, // G4R4_UNORM
-    {VK_FORMAT_ASTC_4x4_SRGB_BLOCK}, // ASTC_2D_4X4_SRGB
-    {VK_FORMAT_ASTC_8x8_SRGB_BLOCK}, // ASTC_2D_8X8_SRGB
-    {VK_FORMAT_ASTC_8x5_SRGB_BLOCK}, // ASTC_2D_8X5_SRGB
-    {VK_FORMAT_ASTC_5x4_SRGB_BLOCK}, // ASTC_2D_5X4_SRGB
-    {VK_FORMAT_ASTC_5x5_UNORM_BLOCK}, // ASTC_2D_5X5_UNORM
-    {VK_FORMAT_ASTC_5x5_SRGB_BLOCK}, // ASTC_2D_5X5_SRGB
-    {VK_FORMAT_ASTC_10x8_UNORM_BLOCK}, // ASTC_2D_10X8_UNORM
-    {VK_FORMAT_ASTC_10x8_SRGB_BLOCK}, // ASTC_2D_10X8_SRGB
-    {VK_FORMAT_ASTC_6x6_UNORM_BLOCK}, // ASTC_2D_6X6_UNORM
-    {VK_FORMAT_ASTC_6x6_SRGB_BLOCK}, // ASTC_2D_6X6_SRGB
-    {VK_FORMAT_ASTC_10x6_UNORM_BLOCK}, // ASTC_2D_10X6_UNORM
-    {VK_FORMAT_ASTC_10x6_SRGB_BLOCK}, // ASTC_2D_10X6_SRGB
-    {VK_FORMAT_ASTC_10x5_UNORM_BLOCK}, // ASTC_2D_10X5_UNORM
-    {VK_FORMAT_ASTC_10x5_SRGB_BLOCK}, // ASTC_2D_10X5_SRGB
-    {VK_FORMAT_ASTC_10x10_UNORM_BLOCK}, // ASTC_2D_10X10_UNORM
-    {VK_FORMAT_ASTC_10x10_SRGB_BLOCK}, // ASTC_2D_10X10_SRGB
-    {VK_FORMAT_ASTC_12x10_UNORM_BLOCK}, // ASTC_2D_12X10_UNORM
-    {VK_FORMAT_ASTC_12x10_SRGB_BLOCK}, // ASTC_2D_12X10_SRGB
-    {VK_FORMAT_ASTC_12x12_UNORM_BLOCK}, // ASTC_2D_12X12_UNORM
-    {VK_FORMAT_ASTC_12x12_SRGB_BLOCK}, // ASTC_2D_12X12_SRGB
-    {VK_FORMAT_ASTC_8x6_UNORM_BLOCK}, // ASTC_2D_8X6_UNORM
-    {VK_FORMAT_ASTC_8x6_SRGB_BLOCK}, // ASTC_2D_8X6_SRGB
-    {VK_FORMAT_ASTC_6x5_UNORM_BLOCK}, // ASTC_2D_6X5_UNORM
-    {VK_FORMAT_ASTC_6x5_SRGB_BLOCK}, // ASTC_2D_6X5_SRGB
-    {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32}, // E5B9G9R9_FLOAT
-
-    // Depth formats
-    {VK_FORMAT_D32_SFLOAT, Attachable}, // D32_FLOAT
-    {VK_FORMAT_D16_UNORM, Attachable}, // D16_UNORM
-    {VK_FORMAT_X8_D24_UNORM_PACK32, Attachable}, // X8_D24_UNORM
-
-    // Stencil formats
-    {VK_FORMAT_S8_UINT, Attachable}, // S8_UINT
-
-    // DepthStencil formats
-    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // D24_UNORM_S8_UINT
-    {VK_FORMAT_D24_UNORM_S8_UINT, Attachable}, // S8_UINT_D24_UNORM (emulated)
-    {VK_FORMAT_D32_SFLOAT_S8_UINT, Attachable}, // D32_FLOAT_S8_UINT
+    VkFormat format{}; ///< Vulkan format
+    s32 usage = 0; ///< Describes image format usage
 };
-static_assert(std::size(tex_format_tuples) == VideoCore::Surface::MaxPixelFormat);
-
 constexpr bool IsZetaFormat(PixelFormat pixel_format) {
-    return pixel_format >= PixelFormat::MaxColorFormat &&
-           pixel_format < PixelFormat::MaxDepthStencilFormat;
+    return pixel_format >= PixelFormat::MaxColorFormat && pixel_format < PixelFormat::MaxDepthStencilFormat;
 }
-} // Anonymous namespace
-
-FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with_srgb,
-                         PixelFormat pixel_format) {
-    ASSERT(static_cast<std::size_t>(pixel_format) < std::size(tex_format_tuples));
-    FormatTuple tuple = tex_format_tuples[static_cast<std::size_t>(pixel_format)];
+FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with_srgb, PixelFormat pixel_format) {
+    u32 const usage_attachable = 1 << 0;
+    u32 const usage_storage = 1 << 1;
+    FormatTuple tuple;
+    switch (pixel_format) {
+#define SURFACE_FORMAT_LIST \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_UNORM, usage_attachable | usage_storage, A8B8G8R8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_SNORM, usage_attachable | usage_storage, A8B8G8R8_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_SINT, usage_attachable | usage_storage, A8B8G8R8_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_UINT, usage_attachable | usage_storage, A8B8G8R8_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R5G6B5_UNORM_PACK16, usage_attachable, R5G6B5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B5G6R5_UNORM_PACK16, 0, B5G6R5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A1R5G5B5_UNORM_PACK16, usage_attachable, A1R5G5B5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A2B10G10R10_UNORM_PACK32, usage_attachable | usage_storage, A2B10G10R10_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A2B10G10R10_UINT_PACK32, usage_attachable | usage_storage, A2B10G10R10_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A2R10G10B10_UNORM_PACK32, usage_attachable, A2R10G10B10_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A1R5G5B5_UNORM_PACK16, usage_attachable, A1B5G5R5_UNORM) /*flipped with swizzle*/ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R5G5B5A1_UNORM_PACK16, 0, A5B5G5R1_UNORM) /*specially swizzled*/ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8_UNORM, usage_attachable | usage_storage, R8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8_SNORM, usage_attachable | usage_storage, R8_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8_SINT, usage_attachable | usage_storage, R8_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8_UINT, usage_attachable | usage_storage, R8_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_SFLOAT, usage_attachable | usage_storage, R16G16B16A16_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_UNORM, usage_attachable | usage_storage, R16G16B16A16_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_SNORM, usage_attachable | usage_storage, R16G16B16A16_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_SINT, usage_attachable | usage_storage, R16G16B16A16_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_UINT, usage_attachable | usage_storage, R16G16B16A16_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B10G11R11_UFLOAT_PACK32, usage_attachable | usage_storage, B10G11R11_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32B32A32_UINT, usage_attachable | usage_storage, R32G32B32A32_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC1_RGBA_UNORM_BLOCK, 0, BC1_RGBA_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC2_UNORM_BLOCK, 0, BC2_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC3_UNORM_BLOCK, 0, BC3_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC4_UNORM_BLOCK, 0, BC4_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC4_SNORM_BLOCK, 0, BC4_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC5_UNORM_BLOCK, 0, BC5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC5_SNORM_BLOCK, 0, BC5_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC7_UNORM_BLOCK, 0, BC7_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC6H_UFLOAT_BLOCK, 0, BC6H_UFLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC6H_SFLOAT_BLOCK, 0, BC6H_SFLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_4x4_UNORM_BLOCK, 0, ASTC_2D_4X4_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_UNORM, usage_attachable | usage_storage, B8G8R8A8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32B32A32_SFLOAT, usage_attachable | usage_storage, R32G32B32A32_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32B32A32_SINT, usage_attachable | usage_storage, R32G32B32A32_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32_SFLOAT, usage_attachable | usage_storage, R32G32_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32_SINT, usage_attachable | usage_storage, R32G32_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32_SFLOAT, usage_attachable | usage_storage, R32_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16_SFLOAT, usage_attachable | usage_storage, R16_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16_UNORM, usage_attachable | usage_storage, R16_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16_SNORM, usage_attachable | usage_storage, R16_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16_UINT, usage_attachable | usage_storage, R16_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16_SINT, usage_attachable | usage_storage, R16_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16_UNORM, usage_attachable | usage_storage, R16G16_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16_SFLOAT, usage_attachable | usage_storage, R16G16_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16_UINT, usage_attachable | usage_storage, R16G16_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16_SINT, usage_attachable | usage_storage, R16G16_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16_SNORM, usage_attachable | usage_storage, R16G16_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32B32_SFLOAT, 0, R32G32B32_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A8B8G8R8_SRGB_PACK32, usage_attachable, A8B8G8R8_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8G8_UNORM, usage_attachable | usage_storage, R8G8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8G8_SNORM, usage_attachable | usage_storage, R8G8_SNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8G8_SINT, usage_attachable | usage_storage, R8G8_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R8G8_UINT, usage_attachable | usage_storage, R8G8_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32G32_UINT, usage_attachable | usage_storage, R32G32_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R16G16B16A16_SFLOAT, usage_attachable | usage_storage, R16G16B16X16_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32_UINT, usage_attachable | usage_storage, R32_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R32_SINT, usage_attachable | usage_storage, R32_SINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x8_UNORM_BLOCK, 0, ASTC_2D_8X8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x5_UNORM_BLOCK, 0, ASTC_2D_8X5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_5x4_UNORM_BLOCK, 0, ASTC_2D_5X4_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_B8G8R8A8_SRGB, usage_attachable, B8G8R8A8_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC1_RGBA_SRGB_BLOCK, 0, BC1_RGBA_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC2_SRGB_BLOCK, 0, BC2_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC3_SRGB_BLOCK, 0, BC3_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_BC7_SRGB_BLOCK, 0, BC7_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, 0, A4B4G4R4_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_R4G4_UNORM_PACK8, 0, G4R4_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_4x4_SRGB_BLOCK, 0, ASTC_2D_4X4_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x8_SRGB_BLOCK, 0, ASTC_2D_8X8_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x5_SRGB_BLOCK, 0, ASTC_2D_8X5_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, 0, ASTC_2D_5X4_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_5x5_UNORM_BLOCK, 0, ASTC_2D_5X5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_5x5_SRGB_BLOCK, 0, ASTC_2D_5X5_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x8_UNORM_BLOCK, 0, ASTC_2D_10X8_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x8_SRGB_BLOCK, 0, ASTC_2D_10X8_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_6x6_UNORM_BLOCK, 0, ASTC_2D_6X6_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_6x6_SRGB_BLOCK, 0, ASTC_2D_6X6_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x6_UNORM_BLOCK, 0, ASTC_2D_10X6_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x6_SRGB_BLOCK, 0, ASTC_2D_10X6_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x5_UNORM_BLOCK, 0, ASTC_2D_10X5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x5_SRGB_BLOCK, 0, ASTC_2D_10X5_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x10_UNORM_BLOCK, 0, ASTC_2D_10X10_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_10x10_SRGB_BLOCK, 0, ASTC_2D_10X10_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_12x10_UNORM_BLOCK, 0, ASTC_2D_12X10_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_12x10_SRGB_BLOCK, 0, ASTC_2D_12X10_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_12x12_UNORM_BLOCK, 0, ASTC_2D_12X12_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_12x12_SRGB_BLOCK, 0, ASTC_2D_12X12_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x6_UNORM_BLOCK, 0, ASTC_2D_8X6_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_8x6_SRGB_BLOCK, 0, ASTC_2D_8X6_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_6x5_UNORM_BLOCK, 0, ASTC_2D_6X5_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_ASTC_6x5_SRGB_BLOCK, 0, ASTC_2D_6X5_SRGB) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, 0, E5B9G9R9_FLOAT) \
+    /* Depth formats */ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_D32_SFLOAT, usage_attachable, D32_FLOAT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_D16_UNORM, usage_attachable, D16_UNORM) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_X8_D24_UNORM_PACK32, usage_attachable, X8_D24_UNORM) \
+    /* Stencil formats */ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_S8_UINT, usage_attachable, S8_UINT) \
+    /* DepthStencil formats */ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_D24_UNORM_S8_UINT, usage_attachable, D24_UNORM_S8_UINT) \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_D24_UNORM_S8_UINT, usage_attachable, S8_UINT_D24_UNORM) /* emulated */ \
+    SURFACE_FORMAT_ELEM(VK_FORMAT_D32_SFLOAT_S8_UINT, usage_attachable, D32_FLOAT_S8_UINT)
+#define SURFACE_FORMAT_ELEM(res, usage, pixel) case PixelFormat::pixel: tuple = {res, usage}; break;
+    SURFACE_FORMAT_LIST
+    default: UNREACHABLE_MSG("unknown format {}", pixel_format);
+#undef SURFACE_FORMAT_ELEM
+#undef SURFACE_FORMAT_LIST
+    }
+    LOG_WARNING(Frontend, "FORMAT {}", pixel_format);
+    bool const is_srgb = with_srgb && VideoCore::Surface::IsPixelFormatSRGB(pixel_format);
 
     // Transcode on hardware that doesn't support ASTC natively
     if (!device.IsOptimalAstcSupported() && VideoCore::Surface::IsPixelFormatASTC(pixel_format)) {
-        const bool is_srgb = with_srgb && VideoCore::Surface::IsPixelFormatSRGB(pixel_format);
-
         switch (Settings::values.astc_recompression.GetValue()) {
         case Settings::AstcRecompression::Uncompressed:
             if (is_srgb) {
                 tuple.format = VK_FORMAT_A8B8G8R8_SRGB_PACK32;
             } else {
                 tuple.format = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
-                tuple.usage |= Storage;
+                tuple.usage |= usage_storage;
             }
             break;
         case Settings::AstcRecompression::Bc1:
@@ -265,7 +260,6 @@ FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with
     }
     // Transcode on hardware that doesn't support BCn natively
     if (!device.IsOptimalBcnSupported() && VideoCore::Surface::IsPixelFormatBCn(pixel_format)) {
-        const bool is_srgb = with_srgb && VideoCore::Surface::IsPixelFormatSRGB(pixel_format);
         if (pixel_format == PixelFormat::BC4_SNORM) {
             tuple.format = VK_FORMAT_R8_SNORM;
         } else if (pixel_format == PixelFormat::BC4_UNORM) {
@@ -274,8 +268,7 @@ FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with
             tuple.format = VK_FORMAT_R8G8_SNORM;
         } else if (pixel_format == PixelFormat::BC5_UNORM) {
             tuple.format = VK_FORMAT_R8G8_UNORM;
-        } else if (pixel_format == PixelFormat::BC6H_SFLOAT ||
-                   pixel_format == PixelFormat::BC6H_UFLOAT) {
+        } else if (pixel_format == PixelFormat::BC6H_SFLOAT || pixel_format == PixelFormat::BC6H_UFLOAT) {
             tuple.format = VK_FORMAT_R16G16B16A16_SFLOAT;
         } else if (is_srgb) {
             tuple.format = VK_FORMAT_A8B8G8R8_SRGB_PACK32;
@@ -283,9 +276,8 @@ FormatInfo SurfaceFormat(const Device& device, FormatType format_type, bool with
             tuple.format = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
         }
     }
-    const bool attachable = (tuple.usage & Attachable) != 0;
-    const bool storage = (tuple.usage & Storage) != 0;
-
+    bool const attachable = (tuple.usage & usage_attachable) != 0;
+    bool const storage = (tuple.usage & usage_storage) != 0;
     VkFormatFeatureFlags usage{};
     switch (format_type) {
     case FormatType::Buffer: