Flutter Engine
VulkanUtilsPriv.h
/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_VulkanUtilsPriv_DEFINED
#define skgpu_VulkanUtilsPriv_DEFINED

#include <cstdint>
#include <string>

// (additional Skia and Vulkan includes elided in this listing)

#ifdef SK_BUILD_FOR_ANDROID
struct AHardwareBuffer;
#endif

namespace SkSL {

enum class ProgramKind : int8_t;
struct ProgramInterface;
struct ProgramSettings;
struct ShaderCaps;

} // namespace SkSL

namespace skgpu {

class ShaderErrorHandler;

inline bool SkSLToSPIRV(const SkSL::ShaderCaps* caps,
                        const std::string& sksl,
                        SkSL::ProgramKind programKind,
                        const SkSL::ProgramSettings& settings,
                        std::string* spirv,
                        SkSL::ProgramInterface* outInterface,
                        ShaderErrorHandler* errorHandler) {
    return SkSLToBackend(caps, &SkSL::ToSPIRV, /*backendLabel=*/nullptr,
                         sksl, programKind, settings, spirv, outInterface, errorHandler);
}
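
// A minimal usage sketch (hypothetical caller-side code, not part of this header;
// `caps`, `skslSource`, `settings`, and `errorHandler` are assumed to be provided
// by the backend that is compiling the shader):
//
//   std::string spirv;
//   SkSL::ProgramInterface interfaceInfo;
//   if (!skgpu::SkSLToSPIRV(caps, skslSource, SkSL::ProgramKind::kFragment,
//                           settings, &spirv, &interfaceInfo, errorHandler)) {
//       // Compilation failed; errorHandler has already been invoked with the diagnostics.
//   }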

static constexpr uint32_t VkFormatChannels(VkFormat vkFormat) {
    switch (vkFormat) {
        // (cases mapping the color formats to their channel-flag masks are elided in
        // this listing)
        case VK_FORMAT_S8_UINT: return 0;
        case VK_FORMAT_D24_UNORM_S8_UINT: return 0;
        case VK_FORMAT_D32_SFLOAT_S8_UINT: return 0;
        default: return 0;
    }
}

static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM: return 4;
        case VK_FORMAT_R8_UNORM: return 1;
        case VK_FORMAT_B8G8R8A8_UNORM: return 4;
        case VK_FORMAT_R5G6B5_UNORM_PACK16: return 2;
        case VK_FORMAT_B5G6R5_UNORM_PACK16: return 2;
        case VK_FORMAT_R16G16B16A16_SFLOAT: return 8;
        case VK_FORMAT_R16_SFLOAT: return 2;
        case VK_FORMAT_R8G8B8_UNORM: return 3;
        case VK_FORMAT_R8G8_UNORM: return 2;
        // (additional cases elided in this listing)
        case VK_FORMAT_R8G8B8A8_SRGB: return 4;
        // (additional cases elided in this listing)
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK: return 8;
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: return 8;
        case VK_FORMAT_R16_UNORM: return 2;
        case VK_FORMAT_R16G16_UNORM: return 4;
        case VK_FORMAT_R16G16B16A16_UNORM: return 8;
        case VK_FORMAT_R16G16_SFLOAT: return 4;
        // Currently we are just overestimating this value, which is used in GPU size
        // calculations, even though the actual size is probably less. We should instead
        // treat planar formats similarly to compressed textures, which go through their
        // own special query for calculating size.
        // (planar-format cases elided in this listing)
        case VK_FORMAT_S8_UINT: return 1;
        case VK_FORMAT_D24_UNORM_S8_UINT: return 4;
        case VK_FORMAT_D32_SFLOAT_S8_UINT: return 8;

        default: return 0;
    }
}
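
// A rough sizing sketch (hypothetical; assumes a non-compressed, single-plane format
// with tightly packed rows, and caller-provided `width`/`height`):
//
//   size_t rowBytes  = width * skgpu::VkFormatBytesPerBlock(vkFormat);
//   size_t imageSize = rowBytes * height;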

// static constexpr SkTextureCompressionType VkFormatToCompressionType(VkFormat vkFormat)
// (definition elided in this listing)

static constexpr int VkFormatIsStencil(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr int VkFormatIsDepth(VkFormat format) {
    switch (format) {
        case VK_FORMAT_D24_UNORM_S8_UINT:
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return true;
        default:
            return false;
    }
}

static constexpr int VkFormatStencilBits(VkFormat format) {
    switch (format) {
        case VK_FORMAT_S8_UINT:
            return 8;
        case VK_FORMAT_D24_UNORM_S8_UINT:
            return 8;
        case VK_FORMAT_D32_SFLOAT_S8_UINT:
            return 8;
        default:
            return 0;
    }
}

// static constexpr bool VkFormatNeedsYcbcrSampler(VkFormat format)
// (definition elided in this listing)

static constexpr bool SampleCountToVkSampleCount(uint32_t samples,
                                                 VkSampleCountFlagBits* vkSamples) {
    SkASSERT(samples >= 1);
    switch (samples) {
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        default:
            return false;
    }
}
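
// Usage sketch (hypothetical caller-side code): map a requested MSAA count to the
// Vulkan flag, falling back to single-sampled rendering when the count has no
// VkSampleCountFlagBits equivalent.
//
//   VkSampleCountFlagBits vkSamples;
//   if (!skgpu::SampleCountToVkSampleCount(requestedSampleCount, &vkSamples)) {
//       vkSamples = VK_SAMPLE_COUNT_1_BIT;
//   }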

/**
 * Returns true if the format is compressed.
 */
static constexpr bool VkFormatIsCompressed(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
            return true;
        default:
            return false;
    }
    SkUNREACHABLE;
}

/**
 * Returns a pointer to the requested extension feature struct, or nullptr if it is not present.
 */
template<typename T> T* GetExtensionFeatureStruct(const VkPhysicalDeviceFeatures2& features,
                                                  VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void* pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}
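
// Usage sketch (hypothetical): query the sampler Ycbcr conversion feature struct from a
// VkPhysicalDeviceFeatures2 chain that was previously populated via
// vkGetPhysicalDeviceFeatures2.
//
//   auto* ycbcrFeatures =
//           skgpu::GetExtensionFeatureStruct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
//                   features2,
//                   VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
//   if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion) {
//       // The device supports sampler Ycbcr conversion.
//   }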

/**
 * Populates outInfo with a VkSamplerYcbcrConversionCreateInfo based on the given
 * VulkanYcbcrConversionInfo.
 */
void SetupSamplerYcbcrConversionInfo(VkSamplerYcbcrConversionCreateInfo* outInfo,
                                     const VulkanYcbcrConversionInfo& conversionInfo);

static constexpr const char* VkFormatToStr(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM: return "R8G8B8A8_UNORM";
        case VK_FORMAT_R8_UNORM: return "R8_UNORM";
        case VK_FORMAT_B8G8R8A8_UNORM: return "B8G8R8A8_UNORM";
        case VK_FORMAT_R5G6B5_UNORM_PACK16: return "R5G6B5_UNORM_PACK16";
        case VK_FORMAT_B5G6R5_UNORM_PACK16: return "B5G6R5_UNORM_PACK16";
        case VK_FORMAT_R16G16B16A16_SFLOAT: return "R16G16B16A16_SFLOAT";
        case VK_FORMAT_R16_SFLOAT: return "R16_SFLOAT";
        case VK_FORMAT_R8G8B8_UNORM: return "R8G8B8_UNORM";
        case VK_FORMAT_R8G8_UNORM: return "R8G8_UNORM";
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
        case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return "B4G4R4A4_UNORM_PACK16";
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return "R4G4B4A4_UNORM_PACK16";
        case VK_FORMAT_R32G32B32A32_SFLOAT: return "R32G32B32A32_SFLOAT";
        case VK_FORMAT_R8G8B8A8_SRGB: return "R8G8B8A8_SRGB";
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return "ETC2_R8G8B8_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGB_UNORM_BLOCK: return "BC1_RGB_UNORM_BLOCK";
        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: return "BC1_RGBA_UNORM_BLOCK";
        case VK_FORMAT_R16_UNORM: return "R16_UNORM";
        case VK_FORMAT_R16G16_UNORM: return "R16G16_UNORM";
        case VK_FORMAT_R16G16B16A16_UNORM: return "R16G16B16A16_UNORM";
        case VK_FORMAT_R16G16_SFLOAT: return "R16G16_SFLOAT";
        case VK_FORMAT_S8_UINT: return "S8_UINT";
        case VK_FORMAT_D24_UNORM_S8_UINT: return "D24_UNORM_S8_UINT";
        case VK_FORMAT_D32_SFLOAT_S8_UINT: return "D32_SFLOAT_S8_UINT";

        default: return "Unknown";
    }
}

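// Usage sketch (hypothetical): VkFormatToStr is handy for diagnostics, e.g.
//
//   SkDebugf("Unsupported VkFormat: %s\n", skgpu::VkFormatToStr(vkFormat));
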
#ifdef SK_BUILD_FOR_ANDROID
/**
 * Vulkan AHardwareBuffer utility functions shared between graphite and ganesh
 */
void GetYcbcrConversionInfoFromFormatProps(
        VulkanYcbcrConversionInfo* outConversionInfo,
        /* remaining parameter elided in this listing */);

bool GetAHardwareBufferProperties(
        /* leading parameters elided in this listing */
        const AHardwareBuffer*,
        VkDevice);

bool AllocateAndBindImageMemory(skgpu::VulkanAlloc* outVulkanAlloc,
                                VkImage,
                                /* additional parameters elided in this listing */
                                VkDevice);

#endif // SK_BUILD_FOR_ANDROID

/**
 * Calls faultProc with faultContext; passes debug info if VK_EXT_device_fault is supported/enabled.
 *
 * Note: must only be called *after* receiving VK_ERROR_DEVICE_LOST.
 */
void InvokeDeviceLostCallback(const skgpu::VulkanInterface* vulkanInterface,
                              VkDevice vkDevice,
                              skgpu::VulkanDeviceLostContext deviceLostContext,
                              skgpu::VulkanDeviceLostProc deviceLostProc,
                              bool supportsDeviceFaultInfoExtension);
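
// Usage sketch (hypothetical): after a Vulkan call returns VK_ERROR_DEVICE_LOST, forward
// whatever fault detail the driver can provide to the client's device-lost callback.
// `fInterface`, `fDevice`, `fDeviceLostContext`, `fDeviceLostProc`, and
// `fSupportsDeviceFaultInfo` are assumed members of the calling backend object.
//
//   if (result == VK_ERROR_DEVICE_LOST) {
//       skgpu::InvokeDeviceLostCallback(fInterface, fDevice, fDeviceLostContext,
//                                       fDeviceLostProc, fSupportsDeviceFaultInfo);
//   }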

} // namespace skgpu

#endif // skgpu_VulkanUtilsPriv_DEFINED