From b267b0f8c3a7561813117dd5c9b732a621643c29 Mon Sep 17 00:00:00 2001
From: unknown
Date: Mon, 28 Aug 2023 11:22:03 -0600
Subject: [PATCH] src: Add vk_format_utils.h to UtilityHeaders

The library originates from Vulkan-ValidationLayers, but is being moved
into this repo to make it easier for others to use. The library has also
been modified to be header-only and C-compatible, which allows more
developers to use it. This required some changes, but they only affect
the vkuFormatElementSize and vkuFormatTexelSize functions, which
previously used default parameters. Two new functions,
vkuFormatElementSizeWithAspect and vkuFormatTexelSizeWithAspect, have
been added to handle the non-default image aspect case (the default was
VK_IMAGE_ASPECT_COLOR_BIT).

Renaming was done using the following convention:
* public header files begin with `vk_`
* enums begin with VKU_FORMAT_
* functions begin with vku
---
 BUILD.gn                                     |    1 +
 include/CMakeLists.txt                       |    2 +
 include/vulkan/utility/vk_format_utils.h     | 2264 ++++++++++++++++++
 scripts/generate_source.py                   |    5 +
 scripts/generators/format_utils_generator.py |  662 +++++
 tests/CMakeLists.txt                         |    1 +
 tests/add_subdirectory/CMakeLists.txt        |    2 +
 tests/add_subdirectory/vk_format_utils.c     |   17 +
 tests/add_subdirectory/vk_format_utils_2.c   |   18 +
 tests/find_package/CMakeLists.txt            |    1 +
 tests/format_utils/CMakeLists.txt            |   25 +
 tests/format_utils/test_formats.cpp          |  662 +++++
 12 files changed, 3660 insertions(+)
 create mode 100644 include/vulkan/utility/vk_format_utils.h
 create mode 100644 scripts/generators/format_utils_generator.py
 create mode 100644 tests/add_subdirectory/vk_format_utils.c
 create mode 100644 tests/add_subdirectory/vk_format_utils_2.c
 create mode 100644 tests/format_utils/CMakeLists.txt
 create mode 100644 tests/format_utils/test_formats.cpp

diff --git a/BUILD.gn b/BUILD.gn
index e8e01ca..c5b41fb 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -16,6 +16,7 @@ static_library("vulkan_layer_settings") {
     "include/vulkan/layer/vk_layer_settings.hpp",
     "include/vulkan/layer/vk_layer_settings_ext.h",
     "include/vulkan/utility/vk_dispatch_table.h",
+    "include/vulkan/utility/vk_format_utils.h",
    "include/vulkan/vk_enum_string_helper.h",
     "src/layer/layer_settings_manager.cpp",
     "src/layer/layer_settings_manager.hpp",
diff --git a/include/CMakeLists.txt b/include/CMakeLists.txt
index 1138185..a9a5e77 100644
--- a/include/CMakeLists.txt
+++ b/include/CMakeLists.txt
@@ -23,6 +23,7 @@ if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.19")
     target_sources(VulkanUtilityHeaders PRIVATE
         vulkan/utility/vk_dispatch_table.h
         vulkan/vk_enum_string_helper.h
+        vulkan/utility/vk_format_utils.h
     )
 endif()
@@ -31,3 +32,4 @@ endif()
 target_link_Libraries(VulkanUtilityHeaders INTERFACE Vulkan::Headers)
 target_include_directories(VulkanUtilityHeaders INTERFACE $)
+
diff --git a/include/vulkan/utility/vk_format_utils.h b/include/vulkan/utility/vk_format_utils.h
new file mode 100644
index 0000000..9af341c
--- /dev/null
+++ b/include/vulkan/utility/vk_format_utils.h
@@ -0,0 +1,2264 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See format_utils_generator.py for modifications
+// Copyright 2023 The Khronos Group Inc.
+// Copyright 2023 Valve Corporation
+// Copyright 2023 LunarG, Inc.
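For context, here is a minimal usage sketch of the API change described in the commit message above. It is illustrative only and not part of the patch; it assumes the header is installed as <vulkan/utility/vk_format_utils.h> and that the Vulkan headers are on the include path. The *WithAspect functions replace the removed C++ default parameter, so callers now select the image aspect explicitly:

#include <stdio.h>
#include <vulkan/utility/vk_format_utils.h>

int main(void) {
    /* Single-plane color format: the path that previously used the
       VK_IMAGE_ASPECT_COLOR_BIT default parameter. */
    const VkFormat color = VK_FORMAT_R8G8B8A8_UNORM;
    printf("R8G8B8A8_UNORM element size: %u bytes\n", (unsigned)vkuFormatElementSize(color));

    /* Multi-planar format: query each plane explicitly with the
       *WithAspect variants added by this change. */
    const VkFormat nv12 = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    if (vkuFormatIsMultiplane(nv12)) {
        printf("planes: %u\n", (unsigned)vkuFormatPlaneCount(nv12));
        printf("plane 0 element size: %u bytes\n",
               (unsigned)vkuFormatElementSizeWithAspect(nv12, VK_IMAGE_ASPECT_PLANE_0_BIT));
        printf("plane 1 texel size: %f bytes\n",
               vkuFormatTexelSizeWithAspect(nv12, VK_IMAGE_ASPECT_PLANE_1_BIT));
    }
    return 0;
}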
+// +// SPDX-License-Identifier: Apache-2.0 + +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +#include + +#define VKU_FORMAT_INVALID_INDEX 0xFFFFFFFF +#define VKU_FORMAT_MAX_PLANES 3 +#define VKU_FORMAT_MAX_COMPONENTS 4 + +enum VKU_FORMAT_NUMERICAL_TYPE { + VKU_FORMAT_NUMERICAL_TYPE_NONE = 0, + VKU_FORMAT_NUMERICAL_TYPE_SFLOAT, + VKU_FORMAT_NUMERICAL_TYPE_SINT, + VKU_FORMAT_NUMERICAL_TYPE_SNORM, + VKU_FORMAT_NUMERICAL_TYPE_SRGB, + VKU_FORMAT_NUMERICAL_TYPE_SSCALED, + VKU_FORMAT_NUMERICAL_TYPE_UFLOAT, + VKU_FORMAT_NUMERICAL_TYPE_UINT, + VKU_FORMAT_NUMERICAL_TYPE_UNORM, + VKU_FORMAT_NUMERICAL_TYPE_USCALED, +}; + +enum VKU_FORMAT_COMPATIBILITY_CLASS { + VKU_FORMAT_COMPATIBILITY_CLASS_NONE = 0, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_192BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_256BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_32BIT_B8G8R8G8, + VKU_FORMAT_COMPATIBILITY_CLASS_32BIT_G8B8G8R8, + VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B10G10R10G10, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B12G12R12G12, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B16G16R16G16, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G10B10G10R10, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G12B12G12R12, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G16B16G16R16, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_R10G10B10A10, + VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_R12G12B12A12, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_420, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_422, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_444, + VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_ALPHA, + VKU_FORMAT_COMPATIBILITY_CLASS_96BIT, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6, + VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8, + 
VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGB, + VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA, + VKU_FORMAT_COMPATIBILITY_CLASS_BC2, + VKU_FORMAT_COMPATIBILITY_CLASS_BC3, + VKU_FORMAT_COMPATIBILITY_CLASS_BC4, + VKU_FORMAT_COMPATIBILITY_CLASS_BC5, + VKU_FORMAT_COMPATIBILITY_CLASS_BC6H, + VKU_FORMAT_COMPATIBILITY_CLASS_BC7, + VKU_FORMAT_COMPATIBILITY_CLASS_D16, + VKU_FORMAT_COMPATIBILITY_CLASS_D16S8, + VKU_FORMAT_COMPATIBILITY_CLASS_D24, + VKU_FORMAT_COMPATIBILITY_CLASS_D24S8, + VKU_FORMAT_COMPATIBILITY_CLASS_D32, + VKU_FORMAT_COMPATIBILITY_CLASS_D32S8, + VKU_FORMAT_COMPATIBILITY_CLASS_EAC_R, + VKU_FORMAT_COMPATIBILITY_CLASS_EAC_RG, + VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA, + VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB, + VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA, + VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP, + VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP, + VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP, + VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP, + VKU_FORMAT_COMPATIBILITY_CLASS_S8, +}; +// Return the plane index of a given VkImageAspectFlagBits. +// VK_IMAGE_ASPECT_PLANE_0_BIT -> 0 +// VK_IMAGE_ASPECT_PLANE_1_BIT -> 1 +// VK_IMAGE_ASPECT_PLANE_2_BIT -> 2 +// -> VKU_FORMAT_INVALID_INDEX +inline uint32_t vkuGetPlaneIndex(VkImageAspectFlagBits aspect); + +// Returns whether a VkFormat is of the numerical format SFLOAT +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsSFLOAT(VkFormat format); + +// Returns whether a VkFormat is of the numerical format SINT +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsSINT(VkFormat format); + +// Returns whether a VkFormat is of the numerical format SNORM +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsSNORM(VkFormat format); + +// Returns whether a VkFormat is of the numerical format SRGB +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsSRGB(VkFormat format); + +// Returns whether a VkFormat is of the numerical format SSCALED +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsSSCALED(VkFormat format); + +// Returns whether a VkFormat is of the numerical format UFLOAT +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsUFLOAT(VkFormat format); + +// Returns whether a VkFormat is of the numerical format UINT +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsUINT(VkFormat format); + +// Returns whether a VkFormat is of the numerical format UNORM +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsUNORM(VkFormat format); + +// Returns whether a VkFormat is of the numerical format USCALED +// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false +inline bool vkuFormatIsUSCALED(VkFormat format); + +// Returns whether the type of a VkFormat is a OpTypeInt (SPIR-V) from "Interpretation of Numeric Format" table +inline bool vkuFormatIsSampledInt(VkFormat format); + +// Returns whether the type of a VkFormat is a OpTypeFloat (SPIR-V) from "Interpretation of Numeric Format" table +inline bool 
vkuFormatIsSampledFloat(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type ASTC_HDR +inline bool vkuFormatIsCompressed_ASTC_HDR(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type ASTC_LDR +inline bool vkuFormatIsCompressed_ASTC_LDR(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type BC +inline bool vkuFormatIsCompressed_BC(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type EAC +inline bool vkuFormatIsCompressed_EAC(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type ETC2 +inline bool vkuFormatIsCompressed_ETC2(VkFormat format); + +// Returns whether a VkFormat is a compressed format of type PVRTC +inline bool vkuFormatIsCompressed_PVRTC(VkFormat format); + +// Returns whether a VkFormat is of any compressed format type +inline bool vkuFormatIsCompressed(VkFormat format); + +// Returns whether a VkFormat is either a depth or stencil format +inline bool vkuFormatIsDepthOrStencil(VkFormat format); + +// Returns whether a VkFormat is a depth and stencil format +inline bool vkuFormatIsDepthAndStencil(VkFormat format); + +// Returns whether a VkFormat is a depth only format +inline bool vkuFormatIsDepthOnly(VkFormat format); + +// Returns whether a VkFormat is a stencil only format +inline bool vkuFormatIsStencilOnly(VkFormat format); + +// Returns whether a VkFormat has a depth component +inline bool vkuFormatHasDepth(VkFormat format) { return (vkuFormatIsDepthOnly(format) || vkuFormatIsDepthAndStencil(format)); } + +// Returns whether a VkFormat has a stencil component +inline bool vkuFormatHasStencil(VkFormat format) { return (vkuFormatIsStencilOnly(format) || vkuFormatIsDepthAndStencil(format)); } + +// Returns the size of the depth component in bits if it has one. Otherwise it returns 0 +inline uint32_t vkuFormatDepthSize(VkFormat format); + +// Returns the size of the stencil component in bits if it has one. Otherwise it returns 0 +inline uint32_t vkuFormatStencilSize(VkFormat format); + +// Returns the numerical type of the depth component if it has one. Otherwise it returns VKU_FORMAT_NUMERICAL_TYPE_NONE +inline enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatDepthNumericalType(VkFormat format); + +// Returns the numerical type of the stencil component if it has one. 
Otherwise it returns VKU_FORMAT_NUMERICAL_TYPE_NONE
+inline enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatStencilNumericalType(VkFormat format);
+
+// Returns whether a VkFormat is packed
+inline bool vkuFormatIsPacked(VkFormat format);
+
+// Returns whether a VkFormat is YCbCr
+// This corresponds to formats with _444, _422, or _420 in their name
+inline bool vkuFormatRequiresYcbcrConversion(VkFormat format);
+
+// Returns whether a VkFormat is XChromaSubsampled
+// This corresponds to formats with _422 in their name
+inline bool vkuFormatIsXChromaSubsampled(VkFormat format);
+
+// Returns whether a VkFormat is YChromaSubsampled
+// This corresponds to formats with _420 in their name
+inline bool vkuFormatIsYChromaSubsampled(VkFormat format);
+
+// Returns whether a VkFormat is a single-plane "_422" format
+// Single-plane "_422" formats are treated as 2x1 compressed (for copies)
+inline bool vkuFormatIsSinglePlane_422(VkFormat format);
+
+// Returns number of planes in format (which is 1 by default)
+inline uint32_t vkuFormatPlaneCount(VkFormat format);
+
+// Returns whether a VkFormat is multiplane
+inline bool vkuFormatIsMultiplane(VkFormat format) { return ((vkuFormatPlaneCount(format)) > 1u); }
+
+// Returns a VkFormat that is compatible with a given plane of a multiplane format
+// Will return VK_FORMAT_UNDEFINED if given a plane aspect that doesn't exist for the format
+inline VkFormat vkuFindMultiplaneCompatibleFormat(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect);
+
+// Returns the extent divisors of a multiplane format given a plane
+// Will return {1, 1} if given a plane aspect that doesn't exist for the VkFormat
+inline VkExtent2D vkuFindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect);
+
+// Returns the count of components in a VkFormat
+inline uint32_t vkuFormatComponentCount(VkFormat format);
+
+// Returns the texel block extent of a VkFormat
+inline VkExtent3D vkuFormatTexelBlockExtent(VkFormat format);
+
+// Returns the Compatibility Class of a VkFormat as defined by the spec
+inline enum VKU_FORMAT_COMPATIBILITY_CLASS vkuFormatCompatibilityClass(VkFormat format);
+
+// Return true if a VkFormat is 'normal', with one texel per format element
+inline bool vkuFormatElementIsTexel(VkFormat format);
+
+// Return size, in bytes, of one element of a VkFormat
+// Format must not be a depth, stencil, or multiplane format
+inline uint32_t vkuFormatElementSize(VkFormat format);
+
+// Return size, in bytes, of one element of a VkFormat for the given image aspect
+// For multi-plane formats, pass the plane aspect (VK_IMAGE_ASPECT_PLANE_*_BIT) to query a single plane
+inline uint32_t vkuFormatElementSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask);
+
+// Return the size in bytes of one texel of a VkFormat
+// Format must not be a depth, stencil, or multiplane format
+inline double vkuFormatTexelSize(VkFormat format);
+
+// Return the size in bytes of one texel of a VkFormat
+// For compressed or multi-plane, this may be a fractional number
+inline double vkuFormatTexelSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask);
+
+// Returns whether a VkFormat contains only 8-bit sized components
+inline bool vkuFormatIs8bit(VkFormat format);
+
+// Returns whether a VkFormat contains only 16-bit sized components
+inline bool vkuFormatIs16bit(VkFormat format);
+
+// Returns whether a VkFormat contains only 32-bit sized components
+inline bool vkuFormatIs32bit(VkFormat format);
+
+// Returns whether a VkFormat contains only 64-bit sized components
+inline bool vkuFormatIs64bit(VkFormat format);
+
+// Returns whether
a VkFormat has a component of a given size +inline bool vkuFormatHasComponentSize(VkFormat format, uint32_t size); + +// Returns whether a VkFormat has a Red color component +inline bool vkuFormatHasRed(VkFormat format); + +// Returns whether a VkFormat has a Green color component +inline bool vkuFormatHasGreen(VkFormat format); + +// Returns whether a VkFormat has a Blue color component +inline bool vkuFormatHasBlue(VkFormat format); + +// Returns whether a VkFormat has a Alpha color component +inline bool vkuFormatHasAlpha(VkFormat format); + +// Returns whether a VkFormat is equal to VK_FORMAT_UNDEFINED +inline bool vkuFormatIsUndefined(VkFormat format) { return (format == VK_FORMAT_UNDEFINED); } + +// Returns whether a VkFormat is a "blocked image" as defined in the spec (vkspec.html#blocked-image) +inline bool vkuFormatIsBlockedImage(VkFormat format) { + return (vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format)); +} + +// Returns whether a VkFormat is a "color format'. Because there is no official specification definition of +// "color format", it is defined here as anything that isn't a depth/stencil format, multiplane format, or the undefined format. +inline bool vkuFormatIsColor(VkFormat format) { + return !(vkuFormatIsUndefined(format) || vkuFormatIsDepthOrStencil(format) || vkuFormatIsMultiplane(format)); +} + +enum VKU_FORMAT_COMPONENT_TYPE { + VKU_FORMAT_COMPONENT_TYPE_NONE, + VKU_FORMAT_COMPONENT_TYPE_R, + VKU_FORMAT_COMPONENT_TYPE_G, + VKU_FORMAT_COMPONENT_TYPE_B, + VKU_FORMAT_COMPONENT_TYPE_A, + VKU_FORMAT_COMPONENT_TYPE_D, + VKU_FORMAT_COMPONENT_TYPE_S, +}; + +// Compressed formats don't have a defined component size +const uint32_t VKU_FORMAT_COMPRESSED_COMPONENT = 0xFFFFFFFF; + +struct VKU_FORMAT_COMPONENT_INFO { + enum VKU_FORMAT_COMPONENT_TYPE type; + uint32_t size; // bits +}; + +// Generic information for all formats +struct VKU_FORMAT_INFO { + enum VKU_FORMAT_COMPATIBILITY_CLASS compatibility; + uint32_t block_size; // bytes + uint32_t texel_per_block; + VkExtent3D block_extent; + uint32_t component_count; + struct VKU_FORMAT_COMPONENT_INFO components[VKU_FORMAT_MAX_COMPONENTS]; +}; +// clang-format off +inline const struct VKU_FORMAT_INFO vkuGetFormatInfo(VkFormat format) { + switch (format) { + case VK_FORMAT_R4G4_UNORM_PACK8: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 4}, {VKU_FORMAT_COMPONENT_TYPE_G, 4}}}; + return out; } + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 4}, {VKU_FORMAT_COMPONENT_TYPE_G, 4}, {VKU_FORMAT_COMPONENT_TYPE_B, 4}, {VKU_FORMAT_COMPONENT_TYPE_A, 4}}}; + return out; } + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 4}, {VKU_FORMAT_COMPONENT_TYPE_G, 4}, {VKU_FORMAT_COMPONENT_TYPE_R, 4}, {VKU_FORMAT_COMPONENT_TYPE_A, 4}}}; + return out; } + case VK_FORMAT_R5G6B5_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 6}, {VKU_FORMAT_COMPONENT_TYPE_B, 5}}}; + return out; } + case VK_FORMAT_B5G6R5_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 6}, {VKU_FORMAT_COMPONENT_TYPE_R, 5}}}; + 
return out; } + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 5}, {VKU_FORMAT_COMPONENT_TYPE_B, 5}, {VKU_FORMAT_COMPONENT_TYPE_A, 1}}}; + return out; } + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 5}, {VKU_FORMAT_COMPONENT_TYPE_R, 5}, {VKU_FORMAT_COMPONENT_TYPE_A, 1}}}; + return out; } + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 1}, {VKU_FORMAT_COMPONENT_TYPE_R, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 5}, {VKU_FORMAT_COMPONENT_TYPE_B, 5}}}; + return out; } + case VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 1}, {VKU_FORMAT_COMPONENT_TYPE_B, 5}, {VKU_FORMAT_COMPONENT_TYPE_G, 5}, {VKU_FORMAT_COMPONENT_TYPE_R, 5}}}; + return out; } + case VK_FORMAT_A8_UNORM_KHR: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_ALPHA, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8G8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, 
{VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_R8G8B8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, 
{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_24BIT, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_R8G8B8A8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, 
{{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_B8G8R8A8_SRGB: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_A, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_UINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_SINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, 
{VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2R10G10B10_UINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2R10G10B10_SINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_UINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_A2B10G10R10_SINT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 2}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_R16_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + 
return out; } + case VK_FORMAT_R16_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_R16G16_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_48BIT, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, 
{VKU_FORMAT_COMPONENT_TYPE_B, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_SNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_USCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_SSCALED: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R16G16B16A16_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_A, 16}}}; + return out; } + case VK_FORMAT_R32_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}}}; + return out; } + case VK_FORMAT_R32_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}}}; + return out; } + case VK_FORMAT_R32_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}}}; + return out; } + case VK_FORMAT_R32G32_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}}}; + return out; } + case VK_FORMAT_R32G32_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}}}; + return out; } + case VK_FORMAT_R32G32_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}}}; + return out; } + case VK_FORMAT_R32G32B32_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_96BIT, 12, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}}}; + return out; } + case VK_FORMAT_R32G32B32_SINT: { + struct VKU_FORMAT_INFO out = 
{VKU_FORMAT_COMPATIBILITY_CLASS_96BIT, 12, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}}}; + return out; } + case VK_FORMAT_R32G32B32_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_96BIT, 12, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}}}; + return out; } + case VK_FORMAT_R32G32B32A32_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}, {VKU_FORMAT_COMPONENT_TYPE_A, 32}}}; + return out; } + case VK_FORMAT_R32G32B32A32_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}, {VKU_FORMAT_COMPONENT_TYPE_A, 32}}}; + return out; } + case VK_FORMAT_R32G32B32A32_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 32}, {VKU_FORMAT_COMPONENT_TYPE_G, 32}, {VKU_FORMAT_COMPONENT_TYPE_B, 32}, {VKU_FORMAT_COMPONENT_TYPE_A, 32}}}; + return out; } + case VK_FORMAT_R64_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}}}; + return out; } + case VK_FORMAT_R64_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}}}; + return out; } + case VK_FORMAT_R64_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT, 8, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}}}; + return out; } + case VK_FORMAT_R64G64_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}}}; + return out; } + case VK_FORMAT_R64G64_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}}}; + return out; } + case VK_FORMAT_R64G64_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_128BIT, 16, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}}}; + return out; } + case VK_FORMAT_R64G64B64_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_192BIT, 24, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}}}; + return out; } + case VK_FORMAT_R64G64B64_SINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_192BIT, 24, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}}}; + return out; } + case VK_FORMAT_R64G64B64_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_192BIT, 24, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}}}; + return out; } + case VK_FORMAT_R64G64B64A64_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_256BIT, 32, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}, {VKU_FORMAT_COMPONENT_TYPE_A, 64}}}; + return out; } + case VK_FORMAT_R64G64B64A64_SINT: { + 
struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_256BIT, 32, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}, {VKU_FORMAT_COMPONENT_TYPE_A, 64}}}; + return out; } + case VK_FORMAT_R64G64B64A64_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_256BIT, 32, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 64}, {VKU_FORMAT_COMPONENT_TYPE_G, 64}, {VKU_FORMAT_COMPONENT_TYPE_B, 64}, {VKU_FORMAT_COMPONENT_TYPE_A, 64}}}; + return out; } + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 11}, {VKU_FORMAT_COMPONENT_TYPE_R, 11}}}; + return out; } + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_B, 9}, {VKU_FORMAT_COMPONENT_TYPE_G, 9}, {VKU_FORMAT_COMPONENT_TYPE_R, 9}}}; + return out; } + case VK_FORMAT_D16_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D16, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_D, 16}}}; + return out; } + case VK_FORMAT_X8_D24_UNORM_PACK32: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D24, 4, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_D, 24}}}; + return out; } + case VK_FORMAT_D32_SFLOAT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D32, 4, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_D, 32}}}; + return out; } + case VK_FORMAT_S8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_S8, 1, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_S, 8}}}; + return out; } + case VK_FORMAT_D16_UNORM_S8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D16S8, 3, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_D, 16}, {VKU_FORMAT_COMPONENT_TYPE_S, 8}}}; + return out; } + case VK_FORMAT_D24_UNORM_S8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D24S8, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_D, 24}, {VKU_FORMAT_COMPONENT_TYPE_S, 8}}}; + return out; } + case VK_FORMAT_D32_SFLOAT_S8_UINT: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_D32S8, 5, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_D, 32}, {VKU_FORMAT_COMPONENT_TYPE_S, 8}}}; + return out; } + case VK_FORMAT_BC1_RGB_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGB, 8, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC1_RGB_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGB, 8, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA, 8, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: { + struct 
VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA, 8, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC2_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC2, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC2_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC2, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC3_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC3, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC3_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC3, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC4_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC4, 8, 16, {4, 4, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC4_SNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC4, 8, 16, {4, 4, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC5_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC5, 16, 16, {4, 4, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC5_SNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC5, 16, 16, {4, 4, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC6H_UFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC6H, 16, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC6H_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC6H, 16, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, 
VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC7_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC7, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_BC7_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_BC7, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB, 8, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB, 8, 16, {4, 4, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA, 8, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA, 8, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_EAC_R11_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_EAC_R, 8, 16, {4, 4, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 11}}}; + return out; } + case VK_FORMAT_EAC_R11_SNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_EAC_R, 8, 16, {4, 4, 1}, 1, 
{{VKU_FORMAT_COMPONENT_TYPE_R, 11}}}; + return out; } + case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_EAC_RG, 16, 16, {4, 4, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 11}, {VKU_FORMAT_COMPONENT_TYPE_G, 11}}}; + return out; } + case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_EAC_RG, 16, 16, {4, 4, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 11}, {VKU_FORMAT_COMPONENT_TYPE_G, 11}}}; + return out; } + case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4, 16, 20, {5, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4, 16, 20, {5, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5, 16, 25, {5, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5, 16, 25, {5, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5, 16, 30, {6, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5, 16, 30, {6, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 
VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6, 16, 36, {6, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6, 16, 36, {6, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5, 16, 40, {8, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5, 16, 40, {8, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6, 16, 48, {8, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6, 16, 48, {8, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8, 16, 64, {8, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8, 16, 64, {8, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case 
VK_FORMAT_ASTC_10x5_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5, 16, 50, {10, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5, 16, 50, {10, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6, 16, 60, {10, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6, 16, 60, {10, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8, 16, 80, {10, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8, 16, 80, {10, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10, 16, 100, {10, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10, 16, 100, {10, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10, 16, 120, {12, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, 
VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10, 16, 120, {12, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12, 16, 144, {12, 12, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12, 16, 144, {12, 12, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_G8B8G8R8_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT_G8B8G8R8, 4, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_B8G8R8G8_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT_B8G8R8G8, 4, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}, {VKU_FORMAT_COMPONENT_TYPE_G, 8}}}; + return out; } + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_420, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_420, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_422, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_422, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_444, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_R10X6_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case 
VK_FORMAT_R10X6G10X6_UNORM_2PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}}}; + return out; } + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_R10G10B10A10, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_A, 10}}}; + return out; } + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G10B10G10R10, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B10G10R10G10, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}, {VKU_FORMAT_COMPONENT_TYPE_G, 10}}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_R12X4_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 1, {{VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_R12X4G12X4_UNORM_2PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 12}, {VKU_FORMAT_COMPONENT_TYPE_G, 12}}}; + return out; } + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_R12G12B12A12, 8, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, 12}, {VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_A, 12}}}; + return out; } + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G12B12G12R12, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, 
{VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B12G12R12G12, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}, {VKU_FORMAT_COMPONENT_TYPE_G, 12}}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G16B16G16R16_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_G16B16G16R16, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_B16G16R16G16_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_64BIT_B16G16R16G16, 8, 1, {2, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_420, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_422, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, 
{VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP, 8, 1, {8, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP, 8, 1, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP, 8, 1, {8, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP, 8, 1, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP, 8, 1, {8, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP, 8, 1, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP, 8, 1, {8, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP, 8, 1, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, 
{VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4, 16, 16, {4, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4, 16, 20, {5, 4, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5, 16, 25, {5, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5, 16, 30, {6, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6, 16, 36, {6, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5, 16, 40, {8, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6, 16, 48, {8, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8, 16, 64, {8, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = 
{VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5, 16, 50, {10, 5, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6, 16, 60, {10, 6, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8, 16, 80, {10, 8, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10, 16, 100, {10, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10, 16, 120, {12, 10, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12, 16, 144, {12, 12, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_R, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_G, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_B, VKU_FORMAT_COMPRESSED_COMPONENT}, {VKU_FORMAT_COMPONENT_TYPE_A, VKU_FORMAT_COMPRESSED_COMPONENT}}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_444, 3, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 8}, {VKU_FORMAT_COMPONENT_TYPE_B, 8}, {VKU_FORMAT_COMPONENT_TYPE_R, 8}}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 10}, {VKU_FORMAT_COMPONENT_TYPE_B, 10}, {VKU_FORMAT_COMPONENT_TYPE_R, 10}}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 12}, {VKU_FORMAT_COMPONENT_TYPE_B, 12}, {VKU_FORMAT_COMPONENT_TYPE_R, 12}}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_444, 6, 1, {1, 1, 1}, 3, {{VKU_FORMAT_COMPONENT_TYPE_G, 16}, {VKU_FORMAT_COMPONENT_TYPE_B, 16}, {VKU_FORMAT_COMPONENT_TYPE_R, 16}}}; + return out; } + case 
VK_FORMAT_A4R4G4B4_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 4}, {VKU_FORMAT_COMPONENT_TYPE_R, 4}, {VKU_FORMAT_COMPONENT_TYPE_G, 4}, {VKU_FORMAT_COMPONENT_TYPE_B, 4}}}; + return out; } + case VK_FORMAT_A4B4G4R4_UNORM_PACK16: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_16BIT, 2, 1, {1, 1, 1}, 4, {{VKU_FORMAT_COMPONENT_TYPE_A, 4}, {VKU_FORMAT_COMPONENT_TYPE_B, 4}, {VKU_FORMAT_COMPONENT_TYPE_G, 4}, {VKU_FORMAT_COMPONENT_TYPE_R, 4}}}; + return out; } + case VK_FORMAT_R16G16_S10_5_NV: { + struct VKU_FORMAT_INFO out = {VKU_FORMAT_COMPATIBILITY_CLASS_32BIT, 4, 1, {1, 1, 1}, 2, {{VKU_FORMAT_COMPONENT_TYPE_R, 16}, {VKU_FORMAT_COMPONENT_TYPE_G, 16}}}; + return out; } + + default: { + // return values for VK_FORMAT_UNDEFINED + struct VKU_FORMAT_INFO out = { VKU_FORMAT_COMPATIBILITY_CLASS_NONE, 0, 0, {0, 0, 0}, 0, {{VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, {VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, {VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, {VKU_FORMAT_COMPONENT_TYPE_NONE, 0}} }; + return out; + } + }; +} +// clang-format on + +struct VKU_FORMAT_PER_PLANE_COMPATIBILITY { + uint32_t width_divisor; + uint32_t height_divisor; + VkFormat compatible_format; +}; + +// Information for multiplanar formats +struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY { + struct VKU_FORMAT_PER_PLANE_COMPATIBILITY per_plane[VKU_FORMAT_MAX_PLANES]; +}; + +// Source: Vulkan spec Table 47. Plane Format Compatibility Table +// clang-format off +inline const struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY vkuGetFormatCompatibility(VkFormat format) { + switch (format) { + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {2, 2, VK_FORMAT_R8_UNORM }, {2, 2, VK_FORMAT_R8_UNORM }}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {2, 2, VK_FORMAT_R8G8_UNORM }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {2, 1, VK_FORMAT_R8_UNORM }, {2, 1, VK_FORMAT_R8_UNORM }}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {2, 1, VK_FORMAT_R8G8_UNORM }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {1, 1, VK_FORMAT_R8_UNORM }, {1, 1, VK_FORMAT_R8_UNORM }}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 2, VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 2, VK_FORMAT_R10X6_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 2, VK_FORMAT_R10X6G10X6_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 1, VK_FORMAT_R10X6_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, 
VK_FORMAT_R10X6_UNORM_PACK16 }, {2, 1, VK_FORMAT_R10X6G10X6_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 2, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 2, VK_FORMAT_R12X4_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 2, VK_FORMAT_R12X4G12X4_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 1, VK_FORMAT_R12X4_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {2, 1, VK_FORMAT_R12X4G12X4_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {2, 2, VK_FORMAT_R16_UNORM }, {2, 2, VK_FORMAT_R16_UNORM }}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {2, 2, VK_FORMAT_R16G16_UNORM }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {2, 1, VK_FORMAT_R16_UNORM }, {2, 1, VK_FORMAT_R16_UNORM }}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {2, 1, VK_FORMAT_R16G16_UNORM }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {1, 1, VK_FORMAT_R16_UNORM }, {1, 1, VK_FORMAT_R16_UNORM }}}; + return out; } + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R8_UNORM }, {1, 1, VK_FORMAT_R8G8_UNORM }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R10X6_UNORM_PACK16 }, {1, 1, VK_FORMAT_R10X6G10X6_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R12X4_UNORM_PACK16 }, {1, 1, VK_FORMAT_R12X4G12X4_UNORM_2PACK16 }, {1, 1, VK_FORMAT_UNDEFINED }}}; + return out; } + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: { + struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_R16_UNORM }, {1, 1, VK_FORMAT_R16G16_UNORM }, {1, 1, 
VK_FORMAT_UNDEFINED }}};
+ return out; }
+ default: {
+ struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_UNDEFINED}, {1, 1, VK_FORMAT_UNDEFINED}, {1, 1, VK_FORMAT_UNDEFINED}}};
+ return out; }
+ };
+}
+// clang-format on
+
+// Return true if all components in a format are an SFLOAT
+bool vkuFormatIsSFLOAT(VkFormat format) {
+ switch (format) {
+ case VK_FORMAT_R16_SFLOAT:
+ case VK_FORMAT_R16G16_SFLOAT:
+ case VK_FORMAT_R16G16B16_SFLOAT:
+ case VK_FORMAT_R16G16B16A16_SFLOAT:
+ case VK_FORMAT_R32_SFLOAT:
+ case VK_FORMAT_R32G32_SFLOAT:
+ case VK_FORMAT_R32G32B32_SFLOAT:
+ case VK_FORMAT_R32G32B32A32_SFLOAT:
+ case VK_FORMAT_R64_SFLOAT:
+ case VK_FORMAT_R64G64_SFLOAT:
+ case VK_FORMAT_R64G64B64_SFLOAT:
+ case VK_FORMAT_R64G64B64A64_SFLOAT:
+ case VK_FORMAT_D32_SFLOAT:
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// Return true if all components in a format are an SINT
+bool vkuFormatIsSINT(VkFormat format) {
+ switch (format) {
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R8G8_SINT:
+ case VK_FORMAT_R8G8B8_SINT:
+ case VK_FORMAT_B8G8R8_SINT:
+ case VK_FORMAT_R8G8B8A8_SINT:
+ case VK_FORMAT_B8G8R8A8_SINT:
+ case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R16G16_SINT:
+ case VK_FORMAT_R16G16B16_SINT:
+ case VK_FORMAT_R16G16B16A16_SINT:
+ case VK_FORMAT_R32_SINT:
+ case VK_FORMAT_R32G32_SINT:
+ case VK_FORMAT_R32G32B32_SINT:
+ case VK_FORMAT_R32G32B32A32_SINT:
+ case VK_FORMAT_R64_SINT:
+ case VK_FORMAT_R64G64_SINT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ case VK_FORMAT_R16G16_S10_5_NV:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// Return true if all components in a format are an SNORM
+bool vkuFormatIsSNORM(VkFormat format) {
+ switch (format) {
+ case VK_FORMAT_R8_SNORM:
+ case VK_FORMAT_R8G8_SNORM:
+ case VK_FORMAT_R8G8B8_SNORM:
+ case VK_FORMAT_B8G8R8_SNORM:
+ case VK_FORMAT_R8G8B8A8_SNORM:
+ case VK_FORMAT_B8G8R8A8_SNORM:
+ case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+ case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+ case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+ case VK_FORMAT_R16_SNORM:
+ case VK_FORMAT_R16G16_SNORM:
+ case VK_FORMAT_R16G16B16_SNORM:
+ case VK_FORMAT_R16G16B16A16_SNORM:
+ case VK_FORMAT_BC4_SNORM_BLOCK:
+ case VK_FORMAT_BC5_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// Return true if all components in a format are an SRGB
+bool vkuFormatIsSRGB(VkFormat format) {
+ switch (format) {
+ case VK_FORMAT_R8_SRGB:
+ case VK_FORMAT_R8G8_SRGB:
+ case VK_FORMAT_R8G8B8_SRGB:
+ case VK_FORMAT_B8G8R8_SRGB:
+ case VK_FORMAT_R8G8B8A8_SRGB:
+ case VK_FORMAT_B8G8R8A8_SRGB:
+ case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+ case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+ case VK_FORMAT_BC2_SRGB_BLOCK:
+ case
VK_FORMAT_BC3_SRGB_BLOCK: + case VK_FORMAT_BC7_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: + case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: + case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: + case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: + case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: + case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: + case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG: + return true; + default: + return false; + } +} + +// Return true if all components in a format are an SSCALED +bool vkuFormatIsSSCALED(VkFormat format) { + switch (format) { + case VK_FORMAT_R8_SSCALED: + case VK_FORMAT_R8G8_SSCALED: + case VK_FORMAT_R8G8B8_SSCALED: + case VK_FORMAT_B8G8R8_SSCALED: + case VK_FORMAT_R8G8B8A8_SSCALED: + case VK_FORMAT_B8G8R8A8_SSCALED: + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: + case VK_FORMAT_R16_SSCALED: + case VK_FORMAT_R16G16_SSCALED: + case VK_FORMAT_R16G16B16_SSCALED: + case VK_FORMAT_R16G16B16A16_SSCALED: + return true; + default: + return false; + } +} + +// Return true if all components in a format are an UFLOAT +bool vkuFormatIsUFLOAT(VkFormat format) { + switch (format) { + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: + case VK_FORMAT_BC6H_UFLOAT_BLOCK: + return true; + default: + return false; + } +} + +// Return true if all components in a format are an UINT +bool vkuFormatIsUINT(VkFormat format) { + switch (format) { + case VK_FORMAT_R8_UINT: + case VK_FORMAT_R8G8_UINT: + case VK_FORMAT_R8G8B8_UINT: + case VK_FORMAT_B8G8R8_UINT: + case VK_FORMAT_R8G8B8A8_UINT: + case VK_FORMAT_B8G8R8A8_UINT: + case VK_FORMAT_A8B8G8R8_UINT_PACK32: + case VK_FORMAT_A2R10G10B10_UINT_PACK32: + case VK_FORMAT_A2B10G10R10_UINT_PACK32: + case VK_FORMAT_R16_UINT: + case VK_FORMAT_R16G16_UINT: + case VK_FORMAT_R16G16B16_UINT: + case VK_FORMAT_R16G16B16A16_UINT: + case VK_FORMAT_R32_UINT: + case VK_FORMAT_R32G32_UINT: + case VK_FORMAT_R32G32B32_UINT: + case VK_FORMAT_R32G32B32A32_UINT: + case VK_FORMAT_R64_UINT: + case VK_FORMAT_R64G64_UINT: + case VK_FORMAT_R64G64B64_UINT: + case VK_FORMAT_R64G64B64A64_UINT: + case VK_FORMAT_S8_UINT: + return true; + default: + return false; + } +} + +// Return true if all components in a format are an UNORM +bool vkuFormatIsUNORM(VkFormat format) { + switch (format) { + case VK_FORMAT_R4G4_UNORM_PACK8: + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: + case VK_FORMAT_R5G6B5_UNORM_PACK16: + case VK_FORMAT_B5G6R5_UNORM_PACK16: + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: + case VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR: + case VK_FORMAT_A8_UNORM_KHR: + case VK_FORMAT_R8_UNORM: + case VK_FORMAT_R8G8_UNORM: + case VK_FORMAT_R8G8B8_UNORM: + case VK_FORMAT_B8G8R8_UNORM: + case VK_FORMAT_R8G8B8A8_UNORM: + case VK_FORMAT_B8G8R8A8_UNORM: + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: + case 
VK_FORMAT_A2B10G10R10_UNORM_PACK32: + case VK_FORMAT_R16_UNORM: + case VK_FORMAT_R16G16_UNORM: + case VK_FORMAT_R16G16B16_UNORM: + case VK_FORMAT_R16G16B16A16_UNORM: + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_X8_D24_UNORM_PACK32: + case VK_FORMAT_BC1_RGB_UNORM_BLOCK: + case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: + case VK_FORMAT_BC2_UNORM_BLOCK: + case VK_FORMAT_BC3_UNORM_BLOCK: + case VK_FORMAT_BC4_UNORM_BLOCK: + case VK_FORMAT_BC5_UNORM_BLOCK: + case VK_FORMAT_BC7_UNORM_BLOCK: + case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: + case VK_FORMAT_EAC_R11_UNORM_BLOCK: + case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: + case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: + case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: + case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: + case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: + case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: + case VK_FORMAT_G8B8G8R8_422_UNORM: + case VK_FORMAT_B8G8R8G8_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + case VK_FORMAT_R10X6_UNORM_PACK16: + case VK_FORMAT_R10X6G10X6_UNORM_2PACK16: + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_R12X4_UNORM_PACK16: + case VK_FORMAT_R12X4G12X4_UNORM_2PACK16: + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16B16G16R16_422_UNORM: + case VK_FORMAT_B16G16R16G16_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + case VK_FORMAT_A4R4G4B4_UNORM_PACK16: + case VK_FORMAT_A4B4G4R4_UNORM_PACK16: + return true; + default: + return false; + } +} + +// Return true if all components in a format are an USCALED +bool 
vkuFormatIsUSCALED(VkFormat format) { + switch (format) { + case VK_FORMAT_R8_USCALED: + case VK_FORMAT_R8G8_USCALED: + case VK_FORMAT_R8G8B8_USCALED: + case VK_FORMAT_B8G8R8_USCALED: + case VK_FORMAT_R8G8B8A8_USCALED: + case VK_FORMAT_B8G8R8A8_USCALED: + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: + case VK_FORMAT_R16_USCALED: + case VK_FORMAT_R16G16_USCALED: + case VK_FORMAT_R16G16B16_USCALED: + case VK_FORMAT_R16G16B16A16_USCALED: + return true; + default: + return false; + } +} + +inline bool vkuFormatIsSampledInt(VkFormat format) { return (vkuFormatIsSINT(format) || vkuFormatIsUINT(format)); } +inline bool vkuFormatIsSampledFloat(VkFormat format) { + return (vkuFormatIsUNORM(format) || vkuFormatIsSNORM(format) || + vkuFormatIsUSCALED(format) || vkuFormatIsSSCALED(format) || + vkuFormatIsUFLOAT(format) || vkuFormatIsSFLOAT(format) || + vkuFormatIsSRGB(format)); +} + +// Return true if a format is a ASTC_HDR compressed image format +bool vkuFormatIsCompressed_ASTC_HDR(VkFormat format) { + switch (format) { + case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK: + case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK: + return true; + default: + return false; + } +} + +// Return true if a format is a ASTC_LDR compressed image format +bool vkuFormatIsCompressed_ASTC_LDR(VkFormat format) { + switch (format) { + case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: + case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: + case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: + case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: + case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: + case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: + case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: + case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: + case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: + case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: + case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: + case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: + case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: + return true; + default: + return false; + } +} + +// Return true if a format is a BC compressed image format +bool vkuFormatIsCompressed_BC(VkFormat format) { + switch (format) { + case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: + case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: + case VK_FORMAT_BC1_RGB_SRGB_BLOCK: + case VK_FORMAT_BC1_RGB_UNORM_BLOCK: + case VK_FORMAT_BC2_SRGB_BLOCK: + case VK_FORMAT_BC2_UNORM_BLOCK: + case VK_FORMAT_BC3_SRGB_BLOCK: + case VK_FORMAT_BC3_UNORM_BLOCK: + case VK_FORMAT_BC4_SNORM_BLOCK: + case VK_FORMAT_BC4_UNORM_BLOCK: + case VK_FORMAT_BC5_SNORM_BLOCK: + case 
VK_FORMAT_BC5_UNORM_BLOCK: + case VK_FORMAT_BC6H_SFLOAT_BLOCK: + case VK_FORMAT_BC6H_UFLOAT_BLOCK: + case VK_FORMAT_BC7_SRGB_BLOCK: + case VK_FORMAT_BC7_UNORM_BLOCK: + return true; + default: + return false; + } +} + +// Return true if a format is a EAC compressed image format +bool vkuFormatIsCompressed_EAC(VkFormat format) { + switch (format) { + case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: + case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: + case VK_FORMAT_EAC_R11_SNORM_BLOCK: + case VK_FORMAT_EAC_R11_UNORM_BLOCK: + return true; + default: + return false; + } +} + +// Return true if a format is a ETC2 compressed image format +bool vkuFormatIsCompressed_ETC2(VkFormat format) { + switch (format) { + case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: + case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: + case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: + return true; + default: + return false; + } +} + +// Return true if a format is a PVRTC compressed image format +bool vkuFormatIsCompressed_PVRTC(VkFormat format) { + switch (format) { + case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG: + case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG: + case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG: + return true; + default: + return false; + } +} + +// clang-format off +// Return true if a format is any compressed image format +bool vkuFormatIsCompressed(VkFormat format) { + return + vkuFormatIsCompressed_ASTC_HDR(format) || + vkuFormatIsCompressed_ASTC_LDR(format) || + vkuFormatIsCompressed_BC(format) || + vkuFormatIsCompressed_EAC(format) || + vkuFormatIsCompressed_ETC2(format) || + vkuFormatIsCompressed_PVRTC(format); +} +// clang-format on + +// Return true if format is a depth OR stencil format +bool vkuFormatIsDepthOrStencil(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_X8_D24_UNORM_PACK32: + case VK_FORMAT_D32_SFLOAT: + case VK_FORMAT_S8_UINT: + case VK_FORMAT_D16_UNORM_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return true; + default: + return false; + } +} + +// Return true if format is a depth AND stencil format +bool vkuFormatIsDepthAndStencil(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return true; + default: + return false; + } +} + +// Return true if format is a depth ONLY format +bool vkuFormatIsDepthOnly(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_X8_D24_UNORM_PACK32: + case VK_FORMAT_D32_SFLOAT: + return true; + default: + return false; + } +} + +// Return true if format is a stencil ONLY format +bool vkuFormatIsStencilOnly(VkFormat format) { + switch (format) { + case VK_FORMAT_S8_UINT: + return true; + default: + return false; + } +} + +// Returns size of depth component in bits +// Returns zero if no depth component +uint32_t vkuFormatDepthSize(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_D16_UNORM_S8_UINT: + return 16; + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_X8_D24_UNORM_PACK32: + return 24; + case VK_FORMAT_D32_SFLOAT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return 32; + default: + return 0; + } +} + +// Returns size of 
stencil component in bits +// Returns zero if no stencil component +uint32_t vkuFormatStencilSize(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_S8_UINT: + return 8; + default: + return 0; + } +} + +// Returns NONE if no depth component +enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatDepthNumericalType(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_D16_UNORM_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_X8_D24_UNORM_PACK32: + return VKU_FORMAT_NUMERICAL_TYPE_UNORM; + case VK_FORMAT_D32_SFLOAT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return VKU_FORMAT_NUMERICAL_TYPE_SFLOAT; + default: + return VKU_FORMAT_NUMERICAL_TYPE_NONE; + } +} + +// Returns NONE if no stencil component +enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatStencilNumericalType(VkFormat format) { + switch (format) { + case VK_FORMAT_D16_UNORM_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_S8_UINT: + return VKU_FORMAT_NUMERICAL_TYPE_UINT; + default: + return VKU_FORMAT_NUMERICAL_TYPE_NONE; + } +} + +// Return true if format is a packed format +bool vkuFormatIsPacked(VkFormat format) { + switch (format) { + case VK_FORMAT_R4G4_UNORM_PACK8: + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: + case VK_FORMAT_R5G6B5_UNORM_PACK16: + case VK_FORMAT_B5G6R5_UNORM_PACK16: + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: + case VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR: + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: + case VK_FORMAT_A8B8G8R8_UINT_PACK32: + case VK_FORMAT_A8B8G8R8_SINT_PACK32: + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: + case VK_FORMAT_A2R10G10B10_UINT_PACK32: + case VK_FORMAT_A2R10G10B10_SINT_PACK32: + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: + case VK_FORMAT_A2B10G10R10_UINT_PACK32: + case VK_FORMAT_A2B10G10R10_SINT_PACK32: + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: + case VK_FORMAT_X8_D24_UNORM_PACK32: + case VK_FORMAT_R10X6_UNORM_PACK16: + case VK_FORMAT_R10X6G10X6_UNORM_2PACK16: + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_R12X4_UNORM_PACK16: + case VK_FORMAT_R12X4G12X4_UNORM_2PACK16: + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case 
VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_A4R4G4B4_UNORM_PACK16: + case VK_FORMAT_A4B4G4R4_UNORM_PACK16: + return true; + default: + return false; + } +} + +// Return true if format requires sampler YCBCR conversion +// for VK_IMAGE_ASPECT_COLOR_BIT image views +// Table found in spec +bool vkuFormatRequiresYcbcrConversion(VkFormat format) { + switch (format) { + case VK_FORMAT_G8B8G8R8_422_UNORM: + case VK_FORMAT_B8G8R8G8_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16B16G16R16_422_UNORM: + case VK_FORMAT_B16G16R16G16_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + return true; + default: + return false; + } +} + +bool vkuFormatIsXChromaSubsampled(VkFormat format) { + switch (format) { + case VK_FORMAT_G8B8G8R8_422_UNORM: + case VK_FORMAT_B8G8R8G8_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G16B16G16R16_422_UNORM: + case VK_FORMAT_B16G16R16G16_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case 
VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + return true; + default: + return false; + } +} + +bool vkuFormatIsYChromaSubsampled(VkFormat format) { + switch (format) { + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + return true; + default: + return false; + } +} + +bool vkuFormatIsSinglePlane_422(VkFormat format) { + switch (format) { + case VK_FORMAT_G8B8G8R8_422_UNORM: + case VK_FORMAT_B8G8R8G8_422_UNORM: + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + case VK_FORMAT_G16B16G16R16_422_UNORM: + case VK_FORMAT_B16G16R16G16_422_UNORM: + return true; + default: + return false; + } +} + +// Returns number of planes in format (which is 1 by default) +uint32_t vkuFormatPlaneCount(VkFormat format) { + switch (format) { + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + return 2; + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + return 3; + default: + return 1; + } +} + +// Will return VK_FORMAT_UNDEFINED if given a plane aspect that doesn't exist for the format +inline VkFormat vkuFindMultiplaneCompatibleFormat(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect) { + const uint32_t plane_idx = vkuGetPlaneIndex(plane_aspect); + const struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY multiplane_compatibility = vkuGetFormatCompatibility(mp_fmt); + if ((multiplane_compatibility.per_plane[0].compatible_format == VK_FORMAT_UNDEFINED) || (plane_idx >= VKU_FORMAT_MAX_PLANES)) { + return VK_FORMAT_UNDEFINED; + } + + return multiplane_compatibility.per_plane[plane_idx].compatible_format; +} + +inline VkExtent2D vkuFindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect) { + VkExtent2D divisors = {1, 1}; + const uint32_t plane_idx = vkuGetPlaneIndex(plane_aspect); + const struct 
VKU_FORMAT_MULTIPLANE_COMPATIBILITY multiplane_compatibility = vkuGetFormatCompatibility(mp_fmt); + if ((multiplane_compatibility.per_plane[0].compatible_format == VK_FORMAT_UNDEFINED) || (plane_idx >= VKU_FORMAT_MAX_PLANES)) { + return divisors; + } + + divisors.width = multiplane_compatibility.per_plane[plane_idx].width_divisor; + divisors.height = multiplane_compatibility.per_plane[plane_idx].height_divisor; + return divisors; +} + +inline uint32_t vkuFormatComponentCount(VkFormat format) { return vkuGetFormatInfo(format).component_count; } + +inline VkExtent3D vkuFormatTexelBlockExtent(VkFormat format) { return vkuGetFormatInfo(format).block_extent; } + +inline enum VKU_FORMAT_COMPATIBILITY_CLASS vkuFormatCompatibilityClass(VkFormat format) { return vkuGetFormatInfo(format).compatibility; } + +inline bool vkuFormatElementIsTexel(VkFormat format) { + if (vkuFormatIsPacked(format) || vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format) || vkuFormatIsMultiplane(format)) { + return false; + } else { + return true; + } +} + +inline uint32_t vkuFormatElementSize(VkFormat format) { + return vkuFormatElementSizeWithAspect(format, VK_IMAGE_ASPECT_COLOR_BIT); +} + +inline uint32_t vkuFormatElementSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask) { + // Depth/Stencil aspect have separate helper functions + if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) { + return vkuFormatStencilSize(format) / 8; + } else if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) { + return vkuFormatDepthSize(format) / 8; + } else if (vkuFormatIsMultiplane(format)) { + // Element of entire multiplane format is not useful, + // Want to get just a single plane as the lookup format + format = vkuFindMultiplaneCompatibleFormat(format, aspectMask); + } + + return vkuGetFormatInfo(format).block_size; +} + +inline double vkuFormatTexelSize(VkFormat format) { + return vkuFormatTexelSizeWithAspect(format, VK_IMAGE_ASPECT_COLOR_BIT); +} + +inline double vkuFormatTexelSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask) { + double texel_size = (double)(vkuFormatElementSizeWithAspect(format, aspectMask)); + VkExtent3D block_extent = vkuFormatTexelBlockExtent(format); + uint32_t texels_per_block = block_extent.width * block_extent.height * block_extent.depth; + if (1 < texels_per_block) { + texel_size /= (double)(texels_per_block); + } + return texel_size; +} + +inline bool vkuFormatIs8bit(VkFormat format) { + switch (format) { + case VK_FORMAT_A8_UNORM_KHR: + case VK_FORMAT_R8_UNORM: + case VK_FORMAT_R8_SNORM: + case VK_FORMAT_R8_USCALED: + case VK_FORMAT_R8_SSCALED: + case VK_FORMAT_R8_UINT: + case VK_FORMAT_R8_SINT: + case VK_FORMAT_R8_SRGB: + case VK_FORMAT_R8G8_UNORM: + case VK_FORMAT_R8G8_SNORM: + case VK_FORMAT_R8G8_USCALED: + case VK_FORMAT_R8G8_SSCALED: + case VK_FORMAT_R8G8_UINT: + case VK_FORMAT_R8G8_SINT: + case VK_FORMAT_R8G8_SRGB: + case VK_FORMAT_R8G8B8_UNORM: + case VK_FORMAT_R8G8B8_SNORM: + case VK_FORMAT_R8G8B8_USCALED: + case VK_FORMAT_R8G8B8_SSCALED: + case VK_FORMAT_R8G8B8_UINT: + case VK_FORMAT_R8G8B8_SINT: + case VK_FORMAT_R8G8B8_SRGB: + case VK_FORMAT_B8G8R8_UNORM: + case VK_FORMAT_B8G8R8_SNORM: + case VK_FORMAT_B8G8R8_USCALED: + case VK_FORMAT_B8G8R8_SSCALED: + case VK_FORMAT_B8G8R8_UINT: + case VK_FORMAT_B8G8R8_SINT: + case VK_FORMAT_B8G8R8_SRGB: + case VK_FORMAT_R8G8B8A8_UNORM: + case VK_FORMAT_R8G8B8A8_SNORM: + case VK_FORMAT_R8G8B8A8_USCALED: + case VK_FORMAT_R8G8B8A8_SSCALED: + case VK_FORMAT_R8G8B8A8_UINT: + case VK_FORMAT_R8G8B8A8_SINT: + case 
VK_FORMAT_R8G8B8A8_SRGB: + case VK_FORMAT_B8G8R8A8_UNORM: + case VK_FORMAT_B8G8R8A8_SNORM: + case VK_FORMAT_B8G8R8A8_USCALED: + case VK_FORMAT_B8G8R8A8_SSCALED: + case VK_FORMAT_B8G8R8A8_UINT: + case VK_FORMAT_B8G8R8A8_SINT: + case VK_FORMAT_B8G8R8A8_SRGB: + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: + case VK_FORMAT_A8B8G8R8_UINT_PACK32: + case VK_FORMAT_A8B8G8R8_SINT_PACK32: + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: + case VK_FORMAT_S8_UINT: + case VK_FORMAT_G8B8G8R8_422_UNORM: + case VK_FORMAT_B8G8R8G8_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + return true; + default: + return false; + } +} +inline bool vkuFormatIs16bit(VkFormat format) { + switch (format) { + case VK_FORMAT_R16_UNORM: + case VK_FORMAT_R16_SNORM: + case VK_FORMAT_R16_USCALED: + case VK_FORMAT_R16_SSCALED: + case VK_FORMAT_R16_UINT: + case VK_FORMAT_R16_SINT: + case VK_FORMAT_R16_SFLOAT: + case VK_FORMAT_R16G16_UNORM: + case VK_FORMAT_R16G16_SNORM: + case VK_FORMAT_R16G16_USCALED: + case VK_FORMAT_R16G16_SSCALED: + case VK_FORMAT_R16G16_UINT: + case VK_FORMAT_R16G16_SINT: + case VK_FORMAT_R16G16_SFLOAT: + case VK_FORMAT_R16G16B16_UNORM: + case VK_FORMAT_R16G16B16_SNORM: + case VK_FORMAT_R16G16B16_USCALED: + case VK_FORMAT_R16G16B16_SSCALED: + case VK_FORMAT_R16G16B16_UINT: + case VK_FORMAT_R16G16B16_SINT: + case VK_FORMAT_R16G16B16_SFLOAT: + case VK_FORMAT_R16G16B16A16_UNORM: + case VK_FORMAT_R16G16B16A16_SNORM: + case VK_FORMAT_R16G16B16A16_USCALED: + case VK_FORMAT_R16G16B16A16_SSCALED: + case VK_FORMAT_R16G16B16A16_UINT: + case VK_FORMAT_R16G16B16A16_SINT: + case VK_FORMAT_R16G16B16A16_SFLOAT: + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_G16B16G16R16_422_UNORM: + case VK_FORMAT_B16G16R16G16_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + case VK_FORMAT_R16G16_S10_5_NV: + return true; + default: + return false; + } +} +inline bool vkuFormatIs32bit(VkFormat format) { + switch (format) { + case VK_FORMAT_R32_UINT: + case VK_FORMAT_R32_SINT: + case VK_FORMAT_R32_SFLOAT: + case VK_FORMAT_R32G32_UINT: + case VK_FORMAT_R32G32_SINT: + case VK_FORMAT_R32G32_SFLOAT: + case VK_FORMAT_R32G32B32_UINT: + case VK_FORMAT_R32G32B32_SINT: + case VK_FORMAT_R32G32B32_SFLOAT: + case VK_FORMAT_R32G32B32A32_UINT: + case VK_FORMAT_R32G32B32A32_SINT: + case VK_FORMAT_R32G32B32A32_SFLOAT: + case VK_FORMAT_D32_SFLOAT: + return true; + default: + return false; + } +} +inline bool vkuFormatIs64bit(VkFormat format) { + switch (format) { + case VK_FORMAT_R64_UINT: + case VK_FORMAT_R64_SINT: + case VK_FORMAT_R64_SFLOAT: + case VK_FORMAT_R64G64_UINT: + case VK_FORMAT_R64G64_SINT: + case VK_FORMAT_R64G64_SFLOAT: + case VK_FORMAT_R64G64B64_UINT: + case VK_FORMAT_R64G64B64_SINT: + case VK_FORMAT_R64G64B64_SFLOAT: + case VK_FORMAT_R64G64B64A64_UINT: + case VK_FORMAT_R64G64B64A64_SINT: + case VK_FORMAT_R64G64B64A64_SFLOAT: + return true; + default: + return false; + } +} + +inline bool vkuFormatHasComponentSize(VkFormat format, uint32_t size) { + const struct VKU_FORMAT_INFO 
format_info = vkuGetFormatInfo(format); + bool equal_component_size = false; + for (size_t i = 0; i < VKU_FORMAT_MAX_COMPONENTS; i++) { + equal_component_size |= format_info.components[i].size == size; + } + return equal_component_size; +} + +inline bool vkuFormatHasComponentType(VkFormat format, enum VKU_FORMAT_COMPONENT_TYPE component) { + const struct VKU_FORMAT_INFO format_info = vkuGetFormatInfo(format); + bool equal_component_type = false; + for (size_t i = 0; i < VKU_FORMAT_MAX_COMPONENTS; i++) { + equal_component_type |= format_info.components[i].type == component; + } + return equal_component_type; +} + +inline bool vkuFormatHasRed(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_R); } + +inline bool vkuFormatHasGreen(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_G); } + +inline bool vkuFormatHasBlue(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_B); } + +inline bool vkuFormatHasAlpha(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_A); } + +inline uint32_t vkuGetPlaneIndex(VkImageAspectFlagBits aspect) { + switch (aspect) { + case VK_IMAGE_ASPECT_PLANE_0_BIT: + return 0; + case VK_IMAGE_ASPECT_PLANE_1_BIT: + return 1; + case VK_IMAGE_ASPECT_PLANE_2_BIT: + return 2; + default: + return VKU_FORMAT_INVALID_INDEX; + } +} + +#ifdef __cplusplus +} +#endif diff --git a/scripts/generate_source.py b/scripts/generate_source.py index 873af13..e2fabe7 100644 --- a/scripts/generate_source.py +++ b/scripts/generate_source.py @@ -24,6 +24,7 @@ def RunGenerators(api: str, registry: str, targetFilter: str) -> None: from generators.base_generator import BaseGeneratorOptions from generators.dispatch_table_generator import DispatchTableOutputGenerator from generators.enum_string_helper_generator import EnumStringHelperOutputGenerator + from generators.format_utils_generator import FormatUtilsOutputGenerator # Build up a list of all generators and custom options generators = { @@ -35,6 +36,10 @@ def RunGenerators(api: str, registry: str, targetFilter: str) -> None: 'generator' : EnumStringHelperOutputGenerator, 'directory' : 'include/vulkan', }, + 'vk_format_utils.h' : { + 'generator' : FormatUtilsOutputGenerator, + 'directory' : 'include/vulkan/utility', + }, } if (targetFilter and targetFilter not in generators.keys()): diff --git a/scripts/generators/format_utils_generator.py b/scripts/generators/format_utils_generator.py new file mode 100644 index 0000000..c65463d --- /dev/null +++ b/scripts/generators/format_utils_generator.py @@ -0,0 +1,662 @@ +#!/usr/bin/python3 -i +# +# Copyright 2023 The Khronos Group Inc. +# Copyright 2023 Valve Corporation +# Copyright 2023 LunarG, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 + +import os +from generators.vulkan_object import (Format) +from generators.base_generator import BaseGenerator + +# Make C name friendly class name +def getClassName(className: str) -> str: + return className.replace('-', '').replace(' ', '_').upper() + +def formatHasDepth(format: Format) -> bool: + return any(x.type == 'D' for x in format.components) + +def formatHasStencil(format: Format) -> bool: + return any(x.type == 'S' for x in format.components) + +def formatHasEqualBitsize(format: Format, bitsize: str) -> bool: + return all(x.bits == bitsize for x in format.components) + +# True if all components are same numericFormat +def formatHasNumericFormat(format: Format, numericFormat: str) -> bool: + return all(x.numericFormat == numericFormat for x in format.components) + +class FormatUtilsOutputGenerator(BaseGenerator): + def __init__(self): + BaseGenerator.__init__(self) + + self.maxPlaneCount = 1 + self.maxComponentCount = 1 + + self.compressedFormats = dict() + self.depthFormats = dict() + self.stencilFormats = dict() + self.numericFormats = set() + + # Lots of switch statements share same ending + self.commonBoolSwitch = ''' return true; + default: + return false; + } +} +''' + + # + # Called at beginning of processing as file is opened + def generate(self): + self.maxPlaneCount = max([len(format.planes) for format in self.vk.formats.values()]) + self.maxComponentCount = max([len(format.components) for format in self.vk.formats.values()]) + + for format in [x for x in self.vk.formats.values() if x.compressed]: + compressed = format.compressed.replace(' ', '_') + if compressed not in self.compressedFormats: + # create list if first time + self.compressedFormats[compressed] = [] + self.compressedFormats[compressed].append(format.name) + + for format in self.vk.formats.values(): + for component in format.components: + if component.type == 'D': + self.depthFormats[format.name] = component + elif component.type == 'S': + self.stencilFormats[format.name] = component + self.numericFormats.add(component.numericFormat) + + out = [] + out.append(f'''// *** THIS FILE IS GENERATED - DO NOT EDIT *** +// See {os.path.basename(__file__)} for modifications +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 +''') + + out.append(''' +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +#include + +''') + out.append('#define VKU_FORMAT_INVALID_INDEX 0xFFFFFFFF\n') + out.append(f'#define VKU_FORMAT_MAX_PLANES {self.maxPlaneCount}\n') + out.append(f'#define VKU_FORMAT_MAX_COMPONENTS {self.maxComponentCount}\n') + out.append('\n') + out.append('enum VKU_FORMAT_NUMERICAL_TYPE {\n') + out.append(' VKU_FORMAT_NUMERICAL_TYPE_NONE = 0,\n') + for index, numericFormat in enumerate(sorted(self.numericFormats), start=1): + out.append(f' VKU_FORMAT_NUMERICAL_TYPE_{numericFormat}') + out.append(',\n') + out.append('};\n') + out.append('\n') + out.append('enum VKU_FORMAT_COMPATIBILITY_CLASS {\n') + out.append(' VKU_FORMAT_COMPATIBILITY_CLASS_NONE = 0,\n') + + classNames = set() + for f in self.vk.formats.values(): + classNames.add(getClassName(f.className)) + + for className in sorted(classNames): + out.append(f' VKU_FORMAT_COMPATIBILITY_CLASS_{className}') + out.append(',\n') + out.append('};\n') + + out.append('// Return the plane index of a given VkImageAspectFlagBits.\n') + out.append('// VK_IMAGE_ASPECT_PLANE_0_BIT -> 0\n') + out.append('// VK_IMAGE_ASPECT_PLANE_1_BIT -> 1\n') + out.append('// VK_IMAGE_ASPECT_PLANE_2_BIT -> 2\n') + out.append('// -> VKU_FORMAT_INVALID_INDEX\n') + out.append('inline uint32_t vkuGetPlaneIndex(VkImageAspectFlagBits aspect);\n\n') + + for numericFormat in sorted(self.numericFormats): + out.append(f'// Returns whether a VkFormat is of the numerical format {numericFormat}\n') + out.append('// Format must only contain one numerical format, so formats like D16_UNORM_S8_UINT always return false\n') + out.append(f'inline bool vkuFormatIs{numericFormat}(VkFormat format);\n\n') + + out.append('''// Returns whether the type of a VkFormat is a OpTypeInt (SPIR-V) from "Interpretation of Numeric Format" table +inline bool vkuFormatIsSampledInt(VkFormat format); + +// Returns whether the type of a VkFormat is a OpTypeFloat (SPIR-V) from "Interpretation of Numeric Format" table +inline bool vkuFormatIsSampledFloat(VkFormat format); + +''') + + for key in sorted(self.compressedFormats.keys()): + out.append(f'// Returns whether a VkFormat is a compressed format of type {key}\n') + out.append(f'inline bool vkuFormatIsCompressed_{key}(VkFormat format);\n\n') + out.append('// Returns whether a VkFormat is of any compressed format type\n') + out.append('inline bool vkuFormatIsCompressed(VkFormat format);\n') + + out.append(''' +// Returns whether a VkFormat is either a depth or stencil format +inline bool vkuFormatIsDepthOrStencil(VkFormat format); + +// Returns whether a VkFormat is a depth and stencil format +inline bool vkuFormatIsDepthAndStencil(VkFormat format); + +// Returns whether a VkFormat is a depth only format +inline bool vkuFormatIsDepthOnly(VkFormat format); + +// Returns whether a VkFormat is a stencil only format +inline bool vkuFormatIsStencilOnly(VkFormat format); + +// Returns whether a VkFormat has a depth component +inline bool vkuFormatHasDepth(VkFormat format) { return (vkuFormatIsDepthOnly(format) || vkuFormatIsDepthAndStencil(format)); } + +// Returns whether a VkFormat has a stencil component +inline bool vkuFormatHasStencil(VkFormat format) { return (vkuFormatIsStencilOnly(format) || vkuFormatIsDepthAndStencil(format)); } + +// Returns the size of the depth component in bits if it has one. 
Otherwise it returns 0 +inline uint32_t vkuFormatDepthSize(VkFormat format); + +// Returns the size of the stencil component in bits if it has one. Otherwise it returns 0 +inline uint32_t vkuFormatStencilSize(VkFormat format); + +// Returns the numerical type of the depth component if it has one. Otherwise it returns VKU_FORMAT_NUMERICAL_TYPE_NONE +inline enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatDepthNumericalType(VkFormat format); + +// Returns the numerical type of the stencil component if it has one. Otherwise it returns VKU_FORMAT_NUMERICAL_TYPE_NONE +inline enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatStencilNumericalType(VkFormat format); + +// Returns whether a VkFormat is packed +inline bool vkuFormatIsPacked(VkFormat format); + +// Returns whether a VkFormat is YCbCr +// This corresponds to formats with _444, _422, or _420 in their name +inline bool vkuFormatRequiresYcbcrConversion(VkFormat format); + +// Returns whether a VkFormat is XChromaSubsampled +// This corresponds to formats with _422 in their name +inline bool vkuFormatIsXChromaSubsampled(VkFormat format); + +// Returns whether a VkFormat is YChromaSubsampled +// This corresponds to formats with _420 in their name +inline bool vkuFormatIsYChromaSubsampled(VkFormat format); + +// Returns whether a VkFormat is Multiplane +// Single-plane "_422" formats are treated as 2x1 compressed (for copies) +inline bool vkuFormatIsSinglePlane_422(VkFormat format); + +// Returns number of planes in format (which is 1 by default) +inline uint32_t vkuFormatPlaneCount(VkFormat format); + +// Returns whether a VkFormat is multiplane +inline bool vkuFormatIsMultiplane(VkFormat format) { return ((vkuFormatPlaneCount(format)) > 1u); } + +// Returns a VkFormat that is compatible with a given plane of a multiplane format +// Will return VK_FORMAT_UNDEFINED if given a plane aspect that doesn't exist for the format +inline VkFormat vkuFindMultiplaneCompatibleFormat(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect); + +// Returns the extent divisors of a multiplane format given a plane +// Will return {1, 1} if given a plane aspect that doesn't exist for the VkFormat +inline VkExtent2D vkuFindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect); + +// Returns the count of components in a VkFormat +inline uint32_t vkuFormatComponentCount(VkFormat format); + +// Returns the texel block extent of a VkFormat +inline VkExtent3D vkuFormatTexelBlockExtent(VkFormat format); + +// Returns the Compatibility Class of a VkFormat as defined by the spec +inline enum VKU_FORMAT_COMPATIBILITY_CLASS vkuFormatCompatibilityClass(VkFormat format); + +// Return true if a VkFormat is 'normal', with one texel per format element +inline bool vkuFormatElementIsTexel(VkFormat format); + +// Return size, in bytes, of one element of a VkFormat +// Format must not be a depth, stencil, or multiplane format +inline uint32_t vkuFormatElementSize(VkFormat format); + +// Return the size in bytes of one texel of a VkFormat +// For compressed or multi-plane, this may be a fractional number +inline uint32_t vkuFormatElementSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask); + +// Return the size in bytes of one texel of a VkFormat +// Format must not be a depth, stencil, or multiplane format +inline double vkuFormatTexelSize(VkFormat format); + +// Return the size in bytes of one texel of a VkFormat +// For compressed or multi-plane, this may be a fractional number +inline double vkuFormatTexelSizeWithAspect(VkFormat format, VkImageAspectFlagBits 
aspectMask); + +''') + for bits in ['8', '16', '32', '64']: + out.append(f'// Returns whether a VkFormat contains only {bits}-bit sized components\n') + out.append(f'inline bool vkuFormatIs{bits}bit(VkFormat format);\n\n') + + out.append('''// Returns whether a VkFormat has a component of a given size +inline bool vkuFormatHasComponentSize(VkFormat format, uint32_t size); + +// Returns whether a VkFormat has a Red color component +inline bool vkuFormatHasRed(VkFormat format); + +// Returns whether a VkFormat has a Green color component +inline bool vkuFormatHasGreen(VkFormat format); + +// Returns whether a VkFormat has a Blue color component +inline bool vkuFormatHasBlue(VkFormat format); + +// Returns whether a VkFormat has a Alpha color component +inline bool vkuFormatHasAlpha(VkFormat format); + +// Returns whether a VkFormat is equal to VK_FORMAT_UNDEFINED +inline bool vkuFormatIsUndefined(VkFormat format) { return (format == VK_FORMAT_UNDEFINED); } + +// Returns whether a VkFormat is a "blocked image" as defined in the spec (vkspec.html#blocked-image) +inline bool vkuFormatIsBlockedImage(VkFormat format) { + return (vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format)); +} + +// Returns whether a VkFormat is a "color format'. Because there is no official specification definition of +// "color format", it is defined here as anything that isn't a depth/stencil format, multiplane format, or the undefined format. +inline bool vkuFormatIsColor(VkFormat format) { + return !(vkuFormatIsUndefined(format) || vkuFormatIsDepthOrStencil(format) || vkuFormatIsMultiplane(format)); +} + +enum VKU_FORMAT_COMPONENT_TYPE { + VKU_FORMAT_COMPONENT_TYPE_NONE, + VKU_FORMAT_COMPONENT_TYPE_R, + VKU_FORMAT_COMPONENT_TYPE_G, + VKU_FORMAT_COMPONENT_TYPE_B, + VKU_FORMAT_COMPONENT_TYPE_A, + VKU_FORMAT_COMPONENT_TYPE_D, + VKU_FORMAT_COMPONENT_TYPE_S, +}; + +// Compressed formats don't have a defined component size +const uint32_t VKU_FORMAT_COMPRESSED_COMPONENT = 0xFFFFFFFF; + +struct VKU_FORMAT_COMPONENT_INFO { + enum VKU_FORMAT_COMPONENT_TYPE type; + uint32_t size; // bits +}; + +// Generic information for all formats +struct VKU_FORMAT_INFO { + enum VKU_FORMAT_COMPATIBILITY_CLASS compatibility; + uint32_t block_size; // bytes + uint32_t texel_per_block; + VkExtent3D block_extent; + uint32_t component_count; + struct VKU_FORMAT_COMPONENT_INFO components[VKU_FORMAT_MAX_COMPONENTS]; +}; +''') + out.append('// clang-format off\n') + out.append('inline const struct VKU_FORMAT_INFO vkuGetFormatInfo(VkFormat format) {\n') + out.append(' switch (format) {\n') + for f in self.vk.formats.values(): + className = getClassName(f.className) + blockExtent = ', '.join(f.blockExtent) if f.blockExtent is not None else '1, 1, 1' + out.extend(f' case {f.name}: {{\n') + out.extend(f' struct VKU_FORMAT_INFO out = {{VKU_FORMAT_COMPATIBILITY_CLASS_{className}, {f.blockSize}, {f.texelsPerBlock}, {{{blockExtent}}}, {len(f.components)}, {{') + for index, component in enumerate(f.components): + bits = 'VKU_FORMAT_COMPRESSED_COMPONENT' if component.bits == 'compressed' else component.bits + out.append(f'{{VKU_FORMAT_COMPONENT_TYPE_{component.type}, {bits}}}') + if index + 1 != len(f.components): + out.append(', ') + out.append('}};\n') + out.append(' return out; }\n') + out.append(''' + default: { + // return values for VK_FORMAT_UNDEFINED + struct VKU_FORMAT_INFO out = { VKU_FORMAT_COMPATIBILITY_CLASS_NONE, 0, 0, {0, 0, 0}, 0, {{VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, {VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, 
{VKU_FORMAT_COMPONENT_TYPE_NONE, 0}, {VKU_FORMAT_COMPONENT_TYPE_NONE, 0}} }; + return out; + } + }; +} +// clang-format on + +struct VKU_FORMAT_PER_PLANE_COMPATIBILITY { + uint32_t width_divisor; + uint32_t height_divisor; + VkFormat compatible_format; +}; + +// Information for multiplanar formats +struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY { + struct VKU_FORMAT_PER_PLANE_COMPATIBILITY per_plane[VKU_FORMAT_MAX_PLANES]; +}; + +''') + out.append('// Source: Vulkan spec Table 47. Plane Format Compatibility Table\n') + out.append('// clang-format off\n') + out.append('inline const struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY vkuGetFormatCompatibility(VkFormat format) {\n') + out.append(' switch (format) {\n') + for format in [x for x in self.vk.formats.values() if x.planes]: + out.extend(f' case {format.name}: {{\n') + out.append(' struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{') + for index, plane in enumerate(format.planes): + if (index != plane.index): + self.logMsg('error', 'index of planes were not added in order') + out.append(f'{{{plane.widthDivisor}, {plane.heightDivisor}, {plane.compatible} }}') + if index + 1 != self.maxPlaneCount: + out.append(', ') + # pad any 'empty' elements + if len(format.planes) < self.maxPlaneCount: + for index in range(len(format.planes), self.maxPlaneCount): + out.append('{1, 1, VK_FORMAT_UNDEFINED }') + if index + 1 != self.maxPlaneCount: + out.append(', ') + out.append('}};\n') + out.append(' return out; }\n') + out.append(' default: {\n') + out.append(' struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY out = {{{1, 1, VK_FORMAT_UNDEFINED}, {1, 1, VK_FORMAT_UNDEFINED}, {1, 1, VK_FORMAT_UNDEFINED}}};\n') + out.append(' return out; }\n') + out.append(' };\n') + out.append('}\n') + out.append('// clang-format on\n') + + for numericFormat in sorted(self.numericFormats): + out.append(f'\n// Return true if all components in a format are an {numericFormat}\n') + out.append(f'bool vkuFormatIs{numericFormat}(VkFormat format) {{\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasNumericFormat(f, numericFormat)]) + out.append(self.commonBoolSwitch) + + out.append(''' +inline bool vkuFormatIsSampledInt(VkFormat format) { return (vkuFormatIsSINT(format) || vkuFormatIsUINT(format)); } +inline bool vkuFormatIsSampledFloat(VkFormat format) { + return (vkuFormatIsUNORM(format) || vkuFormatIsSNORM(format) || + vkuFormatIsUSCALED(format) || vkuFormatIsSSCALED(format) || + vkuFormatIsUFLOAT(format) || vkuFormatIsSFLOAT(format) || + vkuFormatIsSRGB(format)); +} +''') + + for key in sorted(self.compressedFormats.keys()): + out.append(f'\n// Return true if a format is a {key} compressed image format\n') + out.append(f'bool vkuFormatIsCompressed_{key}(VkFormat format) {{\n') + out.append(' switch (format) {\n') + for f in sorted(self.compressedFormats[key]): + out.append(f' case {f}:\n') + out.append(self.commonBoolSwitch) + + out.append('\n// clang-format off\n') + out.append('// Return true if a format is any compressed image format\n') + out.append('bool vkuFormatIsCompressed(VkFormat format) {\n') + out.append(' return\n') + for index, key in enumerate(sorted(self.compressedFormats.keys())): + out.append(f' vkuFormatIsCompressed_{key}(format)') + if (index !=len(self.compressedFormats.keys()) - 1): + out.append(' ||\n') + out.append(';\n') + out.append('}\n') + out.append('// clang-format on\n') + + out.append('\n// Return true if format is a depth OR stencil format\n') + out.append('bool 
vkuFormatIsDepthOrStencil(VkFormat format) {\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasDepth(f) or formatHasStencil(f)]) + out.append(self.commonBoolSwitch) + + out.append('\n// Return true if format is a depth AND stencil format\n') + out.append('bool vkuFormatIsDepthAndStencil(VkFormat format) {\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasDepth(f) and formatHasStencil(f)]) + out.append(self.commonBoolSwitch) + + out.append('\n// Return true if format is a depth ONLY format\n') + out.append('bool vkuFormatIsDepthOnly(VkFormat format) {\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasDepth(f) and not formatHasStencil(f)]) + out.append(self.commonBoolSwitch) + + out.append('\n// Return true if format is a stencil ONLY format\n') + out.append('bool vkuFormatIsStencilOnly(VkFormat format) {\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasStencil(f) and not formatHasDepth(f)]) + out.append(self.commonBoolSwitch) + + out.append('\n// Returns size of depth component in bits') + out.append('\n// Returns zero if no depth component\n') + out.append('uint32_t vkuFormatDepthSize(VkFormat format) {\n') + out.append(' switch (format) {\n') + # sorts case statments together with same return value + used = [] + for key, value in sorted(self.depthFormats.items()): + if key not in used: + for key_dup, value_dup in sorted(self.depthFormats.items()): + if value_dup.bits == value.bits: + used.append(key_dup) + out.append(f' case {key_dup}:\n') + out.append(f' return {value.bits};\n') + out.append(' default:\n') + out.append(' return 0;\n') + out.append(' }\n') + out.append('}\n') + + out.append('\n// Returns size of stencil component in bits') + out.append('\n// Returns zero if no stencil component\n') + out.append('uint32_t vkuFormatStencilSize(VkFormat format) {\n') + out.append(' switch (format) {\n') + # sorts case statments together with same return value + used = [] + for key, value in sorted(self.stencilFormats.items()): + if key not in used: + for key_dup, value_dup in sorted(self.stencilFormats.items()): + if value_dup.bits == value.bits: + used.append(key_dup) + out.append(f' case {key_dup}:\n') + out.append(f' return {value.bits};\n') + out.append(' default:\n') + out.append(' return 0;\n') + out.append(' }\n') + out.append('}\n') + + out.append('\n// Returns NONE if no depth component\n') + out.append('enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatDepthNumericalType(VkFormat format) {\n') + out.append(' switch (format) {\n') + # sorts case statments together with same return value + used = [] + for key, value in sorted(self.depthFormats.items()): + if key not in used: + for key_dup, value_dup in sorted(self.depthFormats.items()): + if value_dup.numericFormat == value.numericFormat: + used.append(key_dup) + out.append(f' case {key_dup}:\n') + out.append(f' return VKU_FORMAT_NUMERICAL_TYPE_{value.numericFormat};\n') + out.append(' default:\n') + out.append(' return VKU_FORMAT_NUMERICAL_TYPE_NONE;\n') + out.append(' }\n') + out.append('}\n') + + out.append('\n// Returns NONE if no stencil component\n') + out.append('enum VKU_FORMAT_NUMERICAL_TYPE vkuFormatStencilNumericalType(VkFormat format) {\n') + out.append(' switch (format) {\n') + # sorts case statments together with same return value + used = [] + 
for key, value in sorted(self.stencilFormats.items()): + if key not in used: + for key_dup, value_dup in sorted(self.stencilFormats.items()): + if value_dup.numericFormat == value.numericFormat: + used.append(key_dup) + out.append(f' case {key_dup}:\n') + out.append(f' return VKU_FORMAT_NUMERICAL_TYPE_{value.numericFormat};\n') + out.append(' default:\n') + out.append(' return VKU_FORMAT_NUMERICAL_TYPE_NONE;\n') + out.append(' }\n') + out.append('}\n') + + out.append('\n// Return true if format is a packed format\n') + out.append('bool vkuFormatIsPacked(VkFormat format) {\n') + out.append(' switch (format) {\n') + for name in [x.name for x in self.vk.formats.values() if x.packed]: + out.append(f' case {name}:\n') + out.append(self.commonBoolSwitch) + + out.append('\n// Return true if format requires sampler YCBCR conversion\n') + out.append('// for VK_IMAGE_ASPECT_COLOR_BIT image views\n') + out.append('// Table found in spec\n') + out.append('bool vkuFormatRequiresYcbcrConversion(VkFormat format) {\n') + out.append(' switch (format) {\n') + for name in [x.name for x in self.vk.formats.values() if x.chroma]: + out.append(f' case {name}:\n') + out.append(self.commonBoolSwitch) + + out.append('\nbool vkuFormatIsXChromaSubsampled(VkFormat format) {\n') + out.append(' switch (format) {\n') + for name in [x.name for x in self.vk.formats.values() if x.chroma == '420' or x.chroma == '422']: + out.append(f' case {name}:\n') + out.append(self.commonBoolSwitch) + + out.append('\nbool vkuFormatIsYChromaSubsampled(VkFormat format) {\n') + out.append(' switch (format) {\n') + for name in [x.name for x in self.vk.formats.values() if x.chroma == '420']: + out.append(f' case {name}:\n') + out.append(self.commonBoolSwitch) + + out.append('\nbool vkuFormatIsSinglePlane_422(VkFormat format) {\n') + out.append(' switch (format) {\n') + for name in [x.name for x in self.vk.formats.values() if x.chroma == '422' and not x.planes]: + out.append(f' case {name}:\n') + out.append(self.commonBoolSwitch) + + out.append('\n// Returns number of planes in format (which is 1 by default)\n') + out.append('uint32_t vkuFormatPlaneCount(VkFormat format) {\n') + out.append(' switch (format) {\n') + # Use range to sort formats together + for i in range(2, self.maxPlaneCount + 1): + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if len(f.planes) == i]) + out.append(f' return {i};\n') + out.append(' default:\n') + out.append(' return 1;\n') + out.append(' }\n') + out.append('}\n') + out.append(''' +// Will return VK_FORMAT_UNDEFINED if given a plane aspect that doesn't exist for the format +inline VkFormat vkuFindMultiplaneCompatibleFormat(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect) { + const uint32_t plane_idx = vkuGetPlaneIndex(plane_aspect); + const struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY multiplane_compatibility = vkuGetFormatCompatibility(mp_fmt); + if ((multiplane_compatibility.per_plane[0].compatible_format == VK_FORMAT_UNDEFINED) || (plane_idx >= VKU_FORMAT_MAX_PLANES)) { + return VK_FORMAT_UNDEFINED; + } + + return multiplane_compatibility.per_plane[plane_idx].compatible_format; +} + +inline VkExtent2D vkuFindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlagBits plane_aspect) { + VkExtent2D divisors = {1, 1}; + const uint32_t plane_idx = vkuGetPlaneIndex(plane_aspect); + const struct VKU_FORMAT_MULTIPLANE_COMPATIBILITY multiplane_compatibility = vkuGetFormatCompatibility(mp_fmt); + if ((multiplane_compatibility.per_plane[0].compatible_format == VK_FORMAT_UNDEFINED) || (plane_idx 
>= VKU_FORMAT_MAX_PLANES)) { + return divisors; + } + + divisors.width = multiplane_compatibility.per_plane[plane_idx].width_divisor; + divisors.height = multiplane_compatibility.per_plane[plane_idx].height_divisor; + return divisors; +} + +inline uint32_t vkuFormatComponentCount(VkFormat format) { return vkuGetFormatInfo(format).component_count; } + +inline VkExtent3D vkuFormatTexelBlockExtent(VkFormat format) { return vkuGetFormatInfo(format).block_extent; } + +inline enum VKU_FORMAT_COMPATIBILITY_CLASS vkuFormatCompatibilityClass(VkFormat format) { return vkuGetFormatInfo(format).compatibility; } + +inline bool vkuFormatElementIsTexel(VkFormat format) { + if (vkuFormatIsPacked(format) || vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format) || vkuFormatIsMultiplane(format)) { + return false; + } else { + return true; + } +} + +inline uint32_t vkuFormatElementSize(VkFormat format) { + return vkuFormatElementSizeWithAspect(format, VK_IMAGE_ASPECT_COLOR_BIT); +} + +inline uint32_t vkuFormatElementSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask) { + // Depth/Stencil aspect have separate helper functions + if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) { + return vkuFormatStencilSize(format) / 8; + } else if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) { + return vkuFormatDepthSize(format) / 8; + } else if (vkuFormatIsMultiplane(format)) { + // Element of entire multiplane format is not useful, + // Want to get just a single plane as the lookup format + format = vkuFindMultiplaneCompatibleFormat(format, aspectMask); + } + + return vkuGetFormatInfo(format).block_size; +} + +inline double vkuFormatTexelSize(VkFormat format) { + return vkuFormatTexelSizeWithAspect(format, VK_IMAGE_ASPECT_COLOR_BIT); +} + +inline double vkuFormatTexelSizeWithAspect(VkFormat format, VkImageAspectFlagBits aspectMask) { + double texel_size = (double)(vkuFormatElementSizeWithAspect(format, aspectMask)); + VkExtent3D block_extent = vkuFormatTexelBlockExtent(format); + uint32_t texels_per_block = block_extent.width * block_extent.height * block_extent.depth; + if (1 < texels_per_block) { + texel_size /= (double)(texels_per_block); + } + return texel_size; +} + +''') + # Could loop the components, but faster to just list these + for bits in ['8', '16', '32', '64']: + out.append(f'inline bool vkuFormatIs{bits}bit(VkFormat format) {{\n') + out.append(' switch (format) {\n') + out.extend([f' case {f.name}:\n' for f in self.vk.formats.values() if formatHasEqualBitsize(f, bits)]) + out.append(self.commonBoolSwitch) + + out.append(''' +inline bool vkuFormatHasComponentSize(VkFormat format, uint32_t size) { + const struct VKU_FORMAT_INFO format_info = vkuGetFormatInfo(format); + bool equal_component_size = false; + for (size_t i = 0; i < VKU_FORMAT_MAX_COMPONENTS; i++) { + equal_component_size |= format_info.components[i].size == size; + } + return equal_component_size; +} + +inline bool vkuFormatHasComponentType(VkFormat format, enum VKU_FORMAT_COMPONENT_TYPE component) { + const struct VKU_FORMAT_INFO format_info = vkuGetFormatInfo(format); + bool equal_component_type = false; + for (size_t i = 0; i < VKU_FORMAT_MAX_COMPONENTS; i++) { + equal_component_type |= format_info.components[i].type == component; + } + return equal_component_type; +} + +inline bool vkuFormatHasRed(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_R); } + +inline bool vkuFormatHasGreen(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_G); } + +inline 
bool vkuFormatHasBlue(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_B); } + +inline bool vkuFormatHasAlpha(VkFormat format) { return vkuFormatHasComponentType(format, VKU_FORMAT_COMPONENT_TYPE_A); } + +inline uint32_t vkuGetPlaneIndex(VkImageAspectFlagBits aspect) { + switch (aspect) { + case VK_IMAGE_ASPECT_PLANE_0_BIT: + return 0; + case VK_IMAGE_ASPECT_PLANE_1_BIT: + return 1; + case VK_IMAGE_ASPECT_PLANE_2_BIT: + return 2; + default: + return VKU_FORMAT_INVALID_INDEX; + } +} + +#ifdef __cplusplus +} +#endif''') + + self.write("".join(out)) + diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 1c8dd9d..eee2da6 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -6,3 +6,4 @@ add_subdirectory(layer) add_subdirectory(generated) add_subdirectory(vk_dispatch_table) +add_subdirectory(format_utils) diff --git a/tests/add_subdirectory/CMakeLists.txt b/tests/add_subdirectory/CMakeLists.txt index 65e94e6..35dd258 100644 --- a/tests/add_subdirectory/CMakeLists.txt +++ b/tests/add_subdirectory/CMakeLists.txt @@ -53,6 +53,8 @@ target_sources(add_subdirectory_example PRIVATE vk_dispatch_table.c vk_enum_string_helper.c vk_layer_settings.c + vk_format_utils.c + vk_format_utils_2.c # Need two translation units to test if header file behaves correctly. ) target_link_libraries(add_subdirectory_example PRIVATE diff --git a/tests/add_subdirectory/vk_format_utils.c b/tests/add_subdirectory/vk_format_utils.c new file mode 100644 index 0000000..e1c5854 --- /dev/null +++ b/tests/add_subdirectory/vk_format_utils.c @@ -0,0 +1,17 @@ +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +#include + +bool check_format_utils() { + vkuGetPlaneIndex(VK_IMAGE_ASPECT_PLANE_1_BIT); + vkuFormatHasGreen(VK_FORMAT_R8G8B8A8_UNORM); + vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_STENCIL_BIT); + struct VKU_FORMAT_INFO f = vkuGetFormatInfo(VK_FORMAT_R8G8B8A8_SRGB); + if (f.component_count != 4) { + return false; + } + return true; +} diff --git a/tests/add_subdirectory/vk_format_utils_2.c b/tests/add_subdirectory/vk_format_utils_2.c new file mode 100644 index 0000000..090e4c8 --- /dev/null +++ b/tests/add_subdirectory/vk_format_utils_2.c @@ -0,0 +1,18 @@ +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +#include + +// Need two translation units to test if header file behaves correctly. 
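/*
 * Illustrative usage sketch (editorial example, not part of this patch): shows how a
 * consumer might size one tightly packed image level with the helpers added above.
 * The helper name image_level_size_bytes and the assumption that the header is
 * reachable as <vulkan/utility/vk_format_utils.h> (e.g. via Vulkan::UtilityHeaders)
 * are hypothetical; only the vku* calls themselves come from the new header.
 */
#include <stdint.h>
#include <vulkan/vulkan_core.h>
#include <vulkan/utility/vk_format_utils.h>

static uint64_t image_level_size_bytes(VkFormat format, VkExtent3D extent) {
    /* For block-compressed formats one "element" covers a whole texel block,
       so round the extent up to whole blocks before multiplying. */
    const VkExtent3D block = vkuFormatTexelBlockExtent(format);
    if (block.width == 0 || block.height == 0 || block.depth == 0) {
        return 0; /* VK_FORMAT_UNDEFINED (or unknown format) reports a zero block extent */
    }
    const uint64_t blocks_x = (extent.width + block.width - 1) / block.width;
    const uint64_t blocks_y = (extent.height + block.height - 1) / block.height;
    const uint64_t blocks_z = (extent.depth + block.depth - 1) / block.depth;
    /* vkuFormatElementSize assumes VK_IMAGE_ASPECT_COLOR_BIT; depth/stencil and
       multi-planar formats would instead go through vkuFormatElementSizeWithAspect,
       one aspect (or plane) at a time. */
    return blocks_x * blocks_y * blocks_z * (uint64_t)vkuFormatElementSize(format);
}

/* Example: image_level_size_bytes(VK_FORMAT_BC7_UNORM_BLOCK, (VkExtent3D){256, 256, 1})
   yields 64 * 64 * 1 blocks * 16 bytes = 65536 bytes. */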
+bool check_format_utils_2() { + vkuGetPlaneIndex(VK_IMAGE_ASPECT_PLANE_1_BIT); + vkuFormatHasGreen(VK_FORMAT_R8G8B8A8_UNORM); + vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_STENCIL_BIT); + struct VKU_FORMAT_INFO f = vkuGetFormatInfo(VK_FORMAT_R8G8B8A8_SRGB); + if (f.component_count != 4) { + return false; + } + return true; +} diff --git a/tests/find_package/CMakeLists.txt b/tests/find_package/CMakeLists.txt index 3ccc6c0..496e6bb 100644 --- a/tests/find_package/CMakeLists.txt +++ b/tests/find_package/CMakeLists.txt @@ -16,6 +16,7 @@ target_sources(find_package_example PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../add_subdirectory/vk_layer_settings.c ${CMAKE_CURRENT_LIST_DIR}/../add_subdirectory/vk_enum_string_helper.c ${CMAKE_CURRENT_LIST_DIR}/../add_subdirectory/vk_dispatch_table.c + ${CMAKE_CURRENT_LIST_DIR}/../add_subdirectory/vk_format_utils.c ) # NOTE: Because VulkanHeaders is a PUBLIC dependency it needs to be found prior to VulkanUtilityLibraries diff --git a/tests/format_utils/CMakeLists.txt b/tests/format_utils/CMakeLists.txt new file mode 100644 index 0000000..223b847 --- /dev/null +++ b/tests/format_utils/CMakeLists.txt @@ -0,0 +1,25 @@ +# Copyright 2023 The Khronos Group Inc. +# Copyright 2023 Valve Corporation +# Copyright 2023 LunarG, Inc. +# +# SPDX-License-Identifier: Apache-2.0 + +find_package(GTest REQUIRED CONFIG) +find_package(magic_enum REQUIRED CONFIG) + +include(GoogleTest) + +add_executable(test_format_utils test_formats.cpp) + +target_link_libraries(test_format_utils PRIVATE + GTest::gtest + GTest::gtest_main + magic_enum::magic_enum + Vulkan::UtilityHeaders +) + +if(${CMAKE_C_COMPILER_ID} MATCHES "(GNU|Clang)") + add_compile_options(-Wpedantic -Wall -Wextra -Werror) +endif() + +gtest_discover_tests(test_format_utils) diff --git a/tests/format_utils/test_formats.cpp b/tests/format_utils/test_formats.cpp new file mode 100644 index 0000000..8304d89 --- /dev/null +++ b/tests/format_utils/test_formats.cpp @@ -0,0 +1,662 @@ +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#define MAGIC_ENUM_RANGE_MIN 0 +#define MAGIC_ENUM_RANGE_MAX 512 +#include +#include +#include + +#include + +// Given the string_view of a VkFormat, find the location of the letter that corresponds with a component +// EG. find_component("R8G8B8", 'G') would return 2 +// returns std::string::npos if it is not found. +size_t find_component(std::string_view str, char letter) { + size_t loc = str.find(letter, 10); + while (loc != std::string_view::npos) { + if (loc < str.length() - 1 && str[loc] == letter && (std::isdigit(str[loc - 1]) || str[loc - 1] == '_') && + std::isdigit(str[loc + 1])) { + break; + } + loc = str.find(letter, loc + 1); + } + + return loc; +} + +// Given the string_view of a VkFormat, find the size of the letter that corresponds with a component +// EG. 
find_component_size("R16G16B16", 3) would return 16 +// Returns 0 if the component can't be found +size_t find_component_size(std::string_view str, char letter) { + size_t loc = find_component(str, letter); + if (loc == std::string_view::npos) return 0; + if (loc + 1 >= str.length()) return 0; + return static_cast(std::stoi(std::string(str.substr(loc + 1)), nullptr)); +} + +size_t find_component_count(std::string_view str) { + size_t comp_count = 0; + if (find_component(str, 'R') != std::string_view::npos) comp_count++; + if (find_component(str, 'G') != std::string_view::npos) comp_count++; + if (find_component(str, 'B') != std::string_view::npos) comp_count++; + if (find_component(str, 'A') != std::string_view::npos) comp_count++; + if (find_component(str, 'D') != std::string_view::npos) comp_count++; + if (find_component(str, 'S') != std::string_view::npos) comp_count++; + return comp_count; +} + +TEST(format_utils, vkuFormatIsSFLOAT) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // special case depth + stencil formats + if (format == VK_FORMAT_D32_SFLOAT_S8_UINT) { + EXPECT_FALSE(vkuFormatIsSFLOAT(format)); + continue; + } + + if (std::string::npos != format_str.find("_SFLOAT")) { + EXPECT_TRUE(vkuFormatIsSFLOAT(format)); + } else { + EXPECT_FALSE(vkuFormatIsSFLOAT(format)); + } + } +} + +TEST(format_utils, vkuFormatIsSINT) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_SINT")) { + EXPECT_TRUE(vkuFormatIsSINT(format)); + } else { + EXPECT_FALSE(vkuFormatIsSINT(format)); + } + } +} +TEST(format_utils, vkuFormatIsSNORM) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_SNORM")) { + EXPECT_TRUE(vkuFormatIsSNORM(format)); + } else { + EXPECT_FALSE(vkuFormatIsSNORM(format)); + } + } +} +TEST(format_utils, vkuFormatIsSRGB) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_SRGB")) { + EXPECT_TRUE(vkuFormatIsSRGB(format)); + } else { + EXPECT_FALSE(vkuFormatIsSRGB(format)); + } + } +} +TEST(format_utils, vkuFormatIsUFLOAT) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_UFLOAT")) { + EXPECT_TRUE(vkuFormatIsUFLOAT(format)); + } else { + EXPECT_FALSE(vkuFormatIsUFLOAT(format)); + } + } +} +TEST(format_utils, vkuFormatIsUINT) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // special case depth + stencil formats + if (format == VK_FORMAT_D16_UNORM_S8_UINT || format == VK_FORMAT_D24_UNORM_S8_UINT || + format == VK_FORMAT_D32_SFLOAT_S8_UINT) { + EXPECT_FALSE(vkuFormatIsUINT(format)); + continue; + } + if (std::string::npos != format_str.find("_UINT")) { + EXPECT_TRUE(vkuFormatIsUINT(format)); + } else { + EXPECT_FALSE(vkuFormatIsUINT(format)); + } + } +} +TEST(format_utils, vkuFormatIsUNORM) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (format == VK_FORMAT_D16_UNORM_S8_UINT || format == VK_FORMAT_D24_UNORM_S8_UINT) { + EXPECT_FALSE(vkuFormatIsUNORM(format)); + continue; + } + if (std::string::npos != format_str.find("_UNORM")) { + EXPECT_TRUE(vkuFormatIsUNORM(format)); + } else { + EXPECT_FALSE(vkuFormatIsUNORM(format)); + } + } +} +TEST(format_utils, vkuFormatIsUSCALED) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_USCALED")) { + EXPECT_TRUE(vkuFormatIsUSCALED(format)); + } else { + 
EXPECT_FALSE(vkuFormatIsUSCALED(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_ASTC_HDR) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // contains ASTC and SFLOAT in the enum + if (std::string::npos != format_str.find("_ASTC_") && std::string::npos != format_str.find("_SFLOAT_BLOCK")) { + EXPECT_TRUE(vkuFormatIsCompressed_ASTC_HDR(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_ASTC_HDR(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_ASTC_LDR) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // contains ASTC and does not contain SFLOAT in the enum + if (std::string::npos != format_str.find("_ASTC_") && std::string::npos == format_str.find("_SFLOAT_BLOCK")) { + EXPECT_TRUE(vkuFormatIsCompressed_ASTC_LDR(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_ASTC_LDR(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_BC) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_BC1") || std::string::npos != format_str.find("_BC2") || + std::string::npos != format_str.find("_BC3") || std::string::npos != format_str.find("_BC4") || + std::string::npos != format_str.find("_BC5") || std::string::npos != format_str.find("_BC6") || + std::string::npos != format_str.find("_BC7")) { + EXPECT_TRUE(vkuFormatIsCompressed_BC(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_BC(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_EAC) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_EAC_")) { + EXPECT_TRUE(vkuFormatIsCompressed_EAC(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_EAC(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_ETC2) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_ETC2_")) { + EXPECT_TRUE(vkuFormatIsCompressed_ETC2(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_ETC2(format)); + } + } +} +TEST(format_utils, vkuFormatIsCompressed_PVRTC) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_PVRTC_")) { + EXPECT_TRUE(vkuFormatIsCompressed_PVRTC(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed_PVRTC(format)); + } + } +} + +TEST(format_utils, vkuFormatIsCompressed) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // Since the contents of FormatIsCompressed is generated, there is no easy way to check a format based on the string. 
+ // Instead, this function will have to be + if (std::string::npos != format_str.find("_BLOCK")) { + EXPECT_TRUE(vkuFormatIsCompressed(format)); + } else { + EXPECT_FALSE(vkuFormatIsCompressed(format)); + } + } +} + +TEST(format_utils, vkuFormatIsDepthOrStencil) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_D16") || std::string::npos != format_str.find("_D24") || + std::string::npos != format_str.find("_D32") || std::string::npos != format_str.find("_S8")) { + EXPECT_TRUE(vkuFormatIsDepthOrStencil(format)); + } else { + EXPECT_FALSE(vkuFormatIsDepthOrStencil(format)); + } + } +} +TEST(format_utils, vkuFormatIsDepthAndStencil) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if ((std::string::npos != format_str.find("_D16") || std::string::npos != format_str.find("_D24") || + std::string::npos != format_str.find("_D32")) && + std::string::npos != format_str.find("_S8")) { + EXPECT_TRUE(vkuFormatIsDepthAndStencil(format)); + } else { + EXPECT_FALSE(vkuFormatIsDepthAndStencil(format)); + } + } +} +TEST(format_utils, vkuFormatIsDepthOnly) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // enum contains D16, D24, or D32 but does not contain _S8 + if ((std::string::npos != format_str.find("_D16") || std::string::npos != format_str.find("_D24") || + std::string::npos != format_str.find("_D32")) && + std::string::npos == format_str.find("_S8")) { + EXPECT_TRUE(vkuFormatIsDepthOnly(format)); + } else { + EXPECT_FALSE(vkuFormatIsDepthOnly(format)); + } + } +} +TEST(format_utils, vkuFormatIsStencilOnly) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + // enum contains _S8 but does not contain D16, D24, or D32 + if (std::string::npos == format_str.find("_D16") && std::string::npos == format_str.find("_D24") && + std::string::npos == format_str.find("_D32") && std::string::npos != format_str.find("_S8")) { + EXPECT_TRUE(vkuFormatIsStencilOnly(format)); + } else { + EXPECT_FALSE(vkuFormatIsStencilOnly(format)); + } + } +} +TEST(format_utils, vkuFormatDepthSize) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_D16")) { + EXPECT_EQ(vkuFormatDepthSize(format), 16); + } else if (std::string::npos != format_str.find("_D24")) { + EXPECT_EQ(vkuFormatDepthSize(format), 24); + } else if (std::string::npos != format_str.find("_D32")) { + EXPECT_EQ(vkuFormatDepthSize(format), 32); + } else { + EXPECT_EQ(vkuFormatDepthSize(format), 0); + } + } +} +TEST(format_utils, vkuFormatStencilSize) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("_S8")) { + EXPECT_EQ(vkuFormatStencilSize(format), 8); + } else { + EXPECT_EQ(vkuFormatStencilSize(format), 0); + } + } +} +TEST(format_utils, vkuFormatDepthNumericalType) { + for (auto [format, format_str] : magic_enum::enum_entries()) { + if (std::string::npos != format_str.find("D16_UNORM") || std::string::npos != format_str.find("D24_UNORM") || + std::string::npos != format_str.find("D32_UNORM")) { + EXPECT_EQ(vkuFormatDepthNumericalType(format), VKU_FORMAT_NUMERICAL_TYPE_UNORM); + } else if (std::string::npos != format_str.find("D32_SFLOAT")) { + EXPECT_EQ(vkuFormatDepthNumericalType(format), VKU_FORMAT_NUMERICAL_TYPE_SFLOAT); + } else { + EXPECT_EQ(vkuFormatDepthNumericalType(format), VKU_FORMAT_NUMERICAL_TYPE_NONE); + } + } +} +TEST(format_utils, vkuFormatStencilNumericalType) { + for (auto [format, format_str] : 
+        if (std::string::npos != format_str.find("S8_UINT")) {
+            EXPECT_EQ(vkuFormatStencilNumericalType(format), VKU_FORMAT_NUMERICAL_TYPE_UINT);
+        } else {
+            EXPECT_EQ(vkuFormatStencilNumericalType(format), VKU_FORMAT_NUMERICAL_TYPE_NONE);
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsPacked) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("_PACK8") || std::string::npos != format_str.find("_PACK16") ||
+            std::string::npos != format_str.find("_PACK32")) {
+            EXPECT_TRUE(vkuFormatIsPacked(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsPacked(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatRequiresYcbcrConversion) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("_444_") || std::string::npos != format_str.find("_422_") ||
+            std::string::npos != format_str.find("_420_")) {
+            EXPECT_TRUE(vkuFormatRequiresYcbcrConversion(format));
+        } else {
+            EXPECT_FALSE(vkuFormatRequiresYcbcrConversion(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsXChromaSubsampled) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos == format_str.find("_444_") &&
+            (std::string::npos != format_str.find("_422_") || std::string::npos != format_str.find("_420_"))) {
+            EXPECT_TRUE(vkuFormatIsXChromaSubsampled(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsXChromaSubsampled(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsYChromaSubsampled) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("_420_")) {
+            EXPECT_TRUE(vkuFormatIsYChromaSubsampled(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsYChromaSubsampled(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsSinglePlane_422) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("_422_")) {
+            EXPECT_TRUE(vkuFormatIsSinglePlane_422(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsSinglePlane_422(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatPlaneCount) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("2PLANE")) {
+            EXPECT_EQ(vkuFormatPlaneCount(format), 2);
+        } else if (std::string::npos != format_str.find("3PLANE")) {
+            EXPECT_EQ(vkuFormatPlaneCount(format), 3);
+        } else {
+            EXPECT_EQ(vkuFormatPlaneCount(format), 1);
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsMultiplane) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("2PLANE") || std::string::npos != format_str.find("3PLANE")) {
+            EXPECT_TRUE(vkuFormatIsMultiplane(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsMultiplane(format));
+        }
+    }
+}
+TEST(format_utils, vkuFindMultiplaneCompatibleFormat) {
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_0_BIT),
+              VK_FORMAT_R10X6_UNORM_PACK16);
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_1_BIT),
+              VK_FORMAT_R10X6_UNORM_PACK16);
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_2_BIT),
+              VK_FORMAT_R10X6_UNORM_PACK16);
+
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_0_BIT),
+              VK_FORMAT_R16_UNORM);
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT),
+              VK_FORMAT_R16G16_UNORM);
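+    // A 2-plane format has no third plane, so requesting PLANE_2 falls back to VK_FORMAT_UNDEFINED.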
+    EXPECT_EQ(vkuFindMultiplaneCompatibleFormat(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_2_BIT),
+              VK_FORMAT_UNDEFINED);
+}
+TEST(format_utils, vkuFindMultiplaneExtentDivisors) {
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_0_BIT).width, 1);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_1_BIT).width, 2);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_2_BIT).width, 2);
+
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_0_BIT).height, 1);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_1_BIT).height, 2);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_IMAGE_ASPECT_PLANE_2_BIT).height, 2);
+
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_0_BIT).width, 1);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT).width, 2);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_2_BIT).width, 1);
+
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_0_BIT).height, 1);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT).height, 1);
+    EXPECT_EQ(vkuFindMultiplaneExtentDivisors(VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_IMAGE_ASPECT_PLANE_2_BIT).height, 1);
+}
+
+TEST(format_utils, vkuFormatComponentCount) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (vkuFormatIsCompressed(format)) {
+            // special case compressed formats because they don't typically list their components in the enum itself
+            continue;
+        }
+
+        EXPECT_EQ(vkuFormatComponentCount(format), find_component_count(format_str));
+    }
+}
+TEST(format_utils, vkuFormatTexelBlockExtent) {
+    constexpr auto formats = magic_enum::enum_values<VkFormat>();
+    for (auto format : formats) {
+        auto extent = vkuFormatTexelBlockExtent(format);
+        if (vkuFormatIsCompressed(format)) {
+            EXPECT_GT(extent.width, 1);
+            EXPECT_GT(extent.height, 1);
+            EXPECT_EQ(extent.depth, 1);
+        } else if (format == VK_FORMAT_UNDEFINED) {
+            EXPECT_EQ(extent.width, 0);
+            EXPECT_EQ(extent.height, 0);
+            EXPECT_EQ(extent.depth, 0);
+            continue;
+        } else {
+            EXPECT_EQ(extent.width, 1);
+            EXPECT_EQ(extent.height, 1);
+            EXPECT_EQ(extent.depth, 1);
+        }
+    }
+    auto extent = vkuFormatTexelBlockExtent(static_cast<VkFormat>(10001));
+    EXPECT_EQ(extent.width, 0);
+    EXPECT_EQ(extent.height, 0);
+    EXPECT_EQ(extent.depth, 0);
+}
+TEST(format_utils, vkuFormatCompatibilityClass) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (format == VK_FORMAT_UNDEFINED) {
+            EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_NONE);
+            continue;
+        }
+        if (std::string::npos != format_str.find("D16") && std::string::npos != format_str.find("S8")) {
+            EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D16S8);
+        } else if (std::string::npos != format_str.find("D16") && std::string::npos == format_str.find("S8")) {
format_str.find("D16") && std::string::npos == format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D16); + } else if (std::string::npos != format_str.find("D24") && std::string::npos != format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D24S8); + } else if (std::string::npos != format_str.find("D24") && std::string::npos == format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D24); + } else if (std::string::npos != format_str.find("D32") && std::string::npos != format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D32S8); + } else if (std::string::npos != format_str.find("D32") && std::string::npos == format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_D32); + } else if (std::string::npos != format_str.find("S8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_S8); + } else if (std::string::npos != format_str.find("_PACK8")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_8BIT); + } else if (std::string::npos != format_str.find("_PACK16")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_16BIT); + } else if (std::string::npos != format_str.find("_PACK32")) { + EXPECT_EQ(vkuFormatCompatibilityClass(format), VKU_FORMAT_COMPATIBILITY_CLASS_32BIT); + } else if (vkuFormatIsCompressed(format)) { + // special case compressed formats because they don't typically list their components in the enum itself + continue; + } else { + size_t component_size_combined = find_component_size(format_str, 'R') + find_component_size(format_str, 'G') + + find_component_size(format_str, 'B') + find_component_size(format_str, 'A'); + VKU_FORMAT_COMPATIBILITY_CLASS comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_NONE; + if (component_size_combined == 8) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_8BIT; + if (component_size_combined == 16) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_16BIT; + if (component_size_combined == 24) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_24BIT; + if (component_size_combined == 32) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_32BIT; + if (component_size_combined == 48) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_48BIT; + if (component_size_combined == 64) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_64BIT; + if (component_size_combined == 96) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_96BIT; + if (component_size_combined == 128) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_128BIT; + if (component_size_combined == 192) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_192BIT; + if (component_size_combined == 256) comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_256BIT; + + if (comp_class == VKU_FORMAT_COMPATIBILITY_CLASS_8BIT && find_component(format_str, 'A') != std::string_view::npos) + comp_class = VKU_FORMAT_COMPATIBILITY_CLASS_8BIT_ALPHA; + + EXPECT_EQ(vkuFormatCompatibilityClass(format), comp_class); + if (vkuFormatCompatibilityClass(format) != comp_class) { + std::cout << ""; + } + } + } +} +TEST(format_utils, vkuFormatElementIsTexel) { + constexpr auto formats = magic_enum::enum_values(); + for (auto format : formats) { + if (!(vkuFormatIsPacked(format) || vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format) || + vkuFormatIsMultiplane(format))) { + EXPECT_TRUE(vkuFormatElementIsTexel(format)); + } else { + 
+            EXPECT_FALSE(vkuFormatElementIsTexel(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatElementSizeWithAspect) {
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_NONE), 16);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_NONE), 16);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_PLANE_0_BIT), 16);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_PLANE_1_BIT), 16);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_PLANE_2_BIT), 16);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_NONE), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_0_BIT), 2);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT), 2);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_2_BIT), 2);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_NONE), 4);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_DEPTH_BIT), 4);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_NONE), 5);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_STENCIL_BIT), 1);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT), 4);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_NONE), 1);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_STENCIL_BIT), 1);
+    EXPECT_EQ(vkuFormatElementSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+}
+TEST(format_utils, vkuFormatTexelSizeWithAspect) {
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_NONE), 16);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_R64G64_SFLOAT, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
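+    // A 5x4 ASTC block is 16 bytes, so the per-texel size is 16 / (5 * 4) bytes.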
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_NONE), 16. / 20.);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_NONE), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_0_BIT), 2);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT), 2);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_2_BIT), 2);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_NONE), 4);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_STENCIL_BIT), 0);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT, VK_IMAGE_ASPECT_DEPTH_BIT), 4);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_NONE), 5);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_STENCIL_BIT), 1);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT), 4);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_NONE), 1);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_STENCIL_BIT), 1);
+    EXPECT_EQ(vkuFormatTexelSizeWithAspect(VK_FORMAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT), 0);
+}
+TEST(format_utils, vkuFormatIs64bit) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (std::string::npos != format_str.find("R64")) {
+            EXPECT_TRUE(vkuFormatIs64bit(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIs64bit(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatHasComponentSize) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (vkuFormatIsCompressed(format)) {
+            // special case compressed formats because they don't typically list their components in the enum itself
+            continue;
+        }
+        for (uint32_t i = 1; i < 64; i++) {
+            bool has_component_size = false;
+            has_component_size |= find_component_size(format_str, 'R') == i;
+            has_component_size |= find_component_size(format_str, 'G') == i;
+            has_component_size |= find_component_size(format_str, 'B') == i;
+            has_component_size |= find_component_size(format_str, 'A') == i;
+            has_component_size |= find_component_size(format_str, 'D') == i;
+            has_component_size |= find_component_size(format_str, 'S') == i;
+            EXPECT_EQ(vkuFormatHasComponentSize(format, i), has_component_size);
+        }
+    }
+}
+
+void check_for_letter(char letter, bool (*func)(VkFormat)) {
+    for (auto [format, format_str] : magic_enum::enum_entries<VkFormat>()) {
+        if (vkuFormatIsCompressed(format)) {
+            // special case compressed formats because they don't typically list their components in the enum itself
+            continue;
+        }
+        auto loc = find_component(format_str, letter);
+        if (std::string::npos != loc) {
+            EXPECT_TRUE(func(format));
+        } else {
+            EXPECT_FALSE(func(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatHasRed) { check_for_letter('R', vkuFormatHasRed); }
+TEST(format_utils, vkuFormatHasGreen) { check_for_letter('G', vkuFormatHasGreen); }
+TEST(format_utils, vkuFormatHasBlue) { check_for_letter('B', vkuFormatHasBlue); }
+TEST(format_utils, vkuFormatHasAlpha) { check_for_letter('A', vkuFormatHasAlpha); }
+
+TEST(format_utils, vkuFormatIsUndefined) {
+    constexpr auto formats = magic_enum::enum_values<VkFormat>();
+    for (auto format : formats) {
+        if (format == VK_FORMAT_UNDEFINED) {
+            EXPECT_TRUE(vkuFormatIsUndefined(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsUndefined(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsBlockedImage) {
+    constexpr auto formats = magic_enum::enum_values<VkFormat>();
+    for (auto format : formats) {
+        if (vkuFormatIsCompressed(format) || vkuFormatIsSinglePlane_422(format)) {
+            EXPECT_TRUE(vkuFormatIsBlockedImage(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsBlockedImage(format));
+        }
+    }
+}
+TEST(format_utils, vkuFormatIsColor) {
+    constexpr auto formats = magic_enum::enum_values<VkFormat>();
+    for (auto format : formats) {
+        if (!(vkuFormatIsUndefined(format) || vkuFormatIsDepthOrStencil(format) || vkuFormatIsMultiplane(format))) {
+            EXPECT_TRUE(vkuFormatIsColor(format));
+        } else {
+            EXPECT_FALSE(vkuFormatIsColor(format));
+        }
+    }
+}
+template <>
+struct magic_enum::customize::enum_range<VkImageAspectFlagBits> {
+    static constexpr bool is_flags = true;
+};
+
+TEST(format_utils, vkuGetPlaneIndex) {
+    for (auto [aspect_flag, aspect_flag_str] : magic_enum::enum_entries<VkImageAspectFlagBits>()) {
+        if (std::string::npos != aspect_flag_str.find("_ASPECT_PLANE_0")) {
+            EXPECT_EQ(vkuGetPlaneIndex(aspect_flag), 0);
+        } else if (std::string::npos != aspect_flag_str.find("_ASPECT_PLANE_1")) {
+            EXPECT_EQ(vkuGetPlaneIndex(aspect_flag), 1);
+        } else if (std::string::npos != aspect_flag_str.find("_ASPECT_PLANE_2")) {
+            EXPECT_EQ(vkuGetPlaneIndex(aspect_flag), 2);
+        } else {
+            EXPECT_EQ(vkuGetPlaneIndex(aspect_flag), VKU_FORMAT_INVALID_INDEX);
+        }
+    }
+}
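
For reviewers who want to try the header outside the test suite, a minimal consumer might look like the sketch below. It is illustrative only and not part of the patch; it assumes the single-argument vkuFormatElementSize mentioned in the commit message (default color aspect) and that vkuFormatTexelBlockExtent returns a VkExtent3D, as exercised by the tests above.

    #include <vulkan/vulkan_core.h>
    #include <vulkan/utility/vk_format_utils.h>

    // Tightly packed byte size of one mip level: round the extent up to whole
    // texel blocks, then multiply by the per-block (element) size.
    static VkDeviceSize packed_mip_size(VkFormat format, VkExtent3D extent) {
        const VkExtent3D block = vkuFormatTexelBlockExtent(format);
        if (block.width == 0) {
            return 0;  // VK_FORMAT_UNDEFINED or an unknown format
        }
        const uint32_t blocks_x = (extent.width + block.width - 1) / block.width;
        const uint32_t blocks_y = (extent.height + block.height - 1) / block.height;
        const uint32_t blocks_z = (extent.depth + block.depth - 1) / block.depth;
        return (VkDeviceSize)vkuFormatElementSize(format) * blocks_x * blocks_y * blocks_z;
    }

Because the header is C compatible, the same snippet builds as either C or C++, which is what the add_subdirectory tests in this change are meant to verify.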