blob: 3c5ec882b1862bffebad027977789902cad14c90 [file] [log] [blame]
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/gpu/GrBackendSurface.h"
9#include "include/gpu/GrRenderTarget.h"
10#include "include/gpu/vk/GrVkBackendContext.h"
11#include "include/gpu/vk/GrVkExtensions.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrShaderCaps.h"
14#include "src/gpu/SkGr.h"
15#include "src/gpu/vk/GrVkCaps.h"
16#include "src/gpu/vk/GrVkInterface.h"
17#include "src/gpu/vk/GrVkTexture.h"
18#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019
// Constructor: sets the caps that are unconditionally true for any Vulkan device (or the
// spec-guaranteed minimums), then defers all device-dependent queries to init().
GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, uint32_t physicalDeviceVersion,
                   const GrVkExtensions& extensions)
    : INHERITED(contextOptions) {

    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;   // always available in Vulkan
    fSRGBSupport = true;   // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fDiscardRenderTargetSupport = true;
    fReuseScratchTextures = true; //TODO: figure this out
    fGpuTracingSupport = false; //TODO: figure this out
    fCompressedTexSubImageSupport = true;
    fOversizedStencilSupport = false; //TODO: figure this out
    fInstanceAttribSupport = true;

    fSemaphoreSupport = true;   // always available in Vulkan
    fFenceSyncSupport = true;   // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;

    fTransferBufferSupport = true;

    // Spec-guaranteed minimums; initGrCaps() raises these from maxImageDimension2D.
    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096; // minimum required by spec

    fDynamicStateArrayGeometryProcessorTextureSupport = true;

    fShaderCaps.reset(new GrShaderCaps(contextOptions));

    // NOTE(review): instanceVersion is accepted but never used here — init() keys everything
    // off physicalDeviceVersion and the extension list. Confirm whether it can be dropped.
    this->init(contextOptions, vkInterface, physDev, features, physicalDeviceVersion, extensions);
}
55
Robert Phillipsbf25d432017-04-07 10:08:53 -040056bool GrVkCaps::initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
Greg Daniel46cfbc62019-06-07 11:43:30 -040057 bool* rectsMustMatch, bool* disallowSubrect) const {
Eric Karl74480882017-04-03 14:49:05 -070058 // Vk doesn't use rectsMustMatch or disallowSubrect. Always return false.
59 *rectsMustMatch = false;
60 *disallowSubrect = false;
61
Brian Salomon467921e2017-03-06 16:17:12 -050062 // We can always succeed here with either a CopyImage (none msaa src) or ResolveImage (msaa).
63 // For CopyImage we can make a simple texture, for ResolveImage we require the dst to be a
64 // render target as well.
Brian Salomon467921e2017-03-06 16:17:12 -050065 desc->fConfig = src->config();
Greg Daniel55fa6472018-03-16 16:13:10 -040066 if (src->numColorSamples() > 1 || src->asTextureProxy()) {
Brian Salomon467921e2017-03-06 16:17:12 -050067 desc->fFlags = kRenderTarget_GrSurfaceFlag;
68 } else {
69 // Just going to use CopyImage here
70 desc->fFlags = kNone_GrSurfaceFlags;
71 }
72
73 return true;
74}
75
Greg Daniel5c7b5412019-05-10 11:39:55 -040076static int get_compatible_format_class(GrPixelConfig config) {
77 switch (config) {
78 case kAlpha_8_GrPixelConfig:
79 case kAlpha_8_as_Red_GrPixelConfig:
80 case kGray_8_GrPixelConfig:
81 case kGray_8_as_Red_GrPixelConfig:
82 return 1;
83 case kRGB_565_GrPixelConfig:
84 case kRGBA_4444_GrPixelConfig:
85 case kRG_88_GrPixelConfig:
86 case kAlpha_half_GrPixelConfig:
87 case kAlpha_half_as_Red_GrPixelConfig:
Robert Phillips66a46032019-06-18 08:00:42 -040088 case kR_16_GrPixelConfig:
Greg Daniel5c7b5412019-05-10 11:39:55 -040089 return 2;
90 case kRGB_888_GrPixelConfig:
91 return 3;
92 case kRGBA_8888_GrPixelConfig:
93 case kRGB_888X_GrPixelConfig:
94 case kBGRA_8888_GrPixelConfig:
95 case kSRGBA_8888_GrPixelConfig:
96 case kSBGRA_8888_GrPixelConfig:
97 case kRGBA_1010102_GrPixelConfig:
Robert Phillips66a46032019-06-18 08:00:42 -040098 case kRG_1616_GrPixelConfig:
Greg Daniel5c7b5412019-05-10 11:39:55 -040099 return 4;
100 case kRGBA_half_GrPixelConfig:
101 case kRGBA_half_Clamped_GrPixelConfig:
102 case kRG_float_GrPixelConfig:
103 return 5;
104 case kRGBA_float_GrPixelConfig:
105 return 6;
106 case kRGB_ETC1_GrPixelConfig:
107 return 7;
108 case kUnknown_GrPixelConfig:
109 case kAlpha_8_as_Alpha_GrPixelConfig:
110 case kGray_8_as_Lum_GrPixelConfig:
111 SK_ABORT("Unsupported Vulkan pixel config");
112 return 0;
Robert Phillips66a46032019-06-18 08:00:42 -0400113
114 // Experimental (for Y416 and mutant P016/P010)
115 case kRGBA_16161616_GrPixelConfig:
116 return 8;
117 case kRG_half_GrPixelConfig:
Robert Phillipsfe18de52019-06-06 17:21:50 -0400118 return 4;
Greg Daniel5c7b5412019-05-10 11:39:55 -0400119 }
120 SK_ABORT("Invalid pixel config");
121 return 0;
122}
123
Greg Daniel46cfbc62019-06-07 11:43:30 -0400124bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
125 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400126 if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
127 return false;
128 }
129
Greg Daniela51e93c2019-03-25 12:30:45 -0400130 if (dstHasYcbcr || srcHasYcbcr) {
131 return false;
132 }
133
Greg Daniel25af6712018-04-25 10:44:38 -0400134 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
135 // as image usage flags.
Greg Daniel46cfbc62019-06-07 11:43:30 -0400136 if (get_compatible_format_class(srcConfig) != get_compatible_format_class(dstConfig)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400137 return false;
138 }
139
140 if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
141 this->shaderCaps()->configOutputSwizzle(dstConfig)) {
142 return false;
143 }
144
145 return true;
146}
147
148bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
Greg Daniela51e93c2019-03-25 12:30:45 -0400149 bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
150 bool srcIsLinear, bool srcHasYcbcr) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400151
152 VkFormat dstFormat;
153 SkAssertResult(GrPixelConfigToVkFormat(dstConfig, &dstFormat));
154 VkFormat srcFormat;
155 SkAssertResult(GrPixelConfigToVkFormat(srcConfig, &srcFormat));
Greg Daniel25af6712018-04-25 10:44:38 -0400156 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
157 // as image usage flags.
Greg Danielcaa795f2019-05-14 11:54:25 -0400158 if (!this->formatCanBeDstofBlit(dstFormat, dstIsLinear) ||
159 !this->formatCanBeSrcofBlit(srcFormat, srcIsLinear)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400160 return false;
161 }
162
163 if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
164 this->shaderCaps()->configOutputSwizzle(dstConfig)) {
165 return false;
166 }
167
168 // We cannot blit images that are multisampled. Will need to figure out if we can blit the
169 // resolved msaa though.
170 if (dstSampleCnt > 1 || srcSampleCnt > 1) {
171 return false;
172 }
173
Greg Daniela51e93c2019-03-25 12:30:45 -0400174 if (dstHasYcbcr || srcHasYcbcr) {
175 return false;
176 }
177
Greg Daniel25af6712018-04-25 10:44:38 -0400178 return true;
179}
180
Greg Daniel46cfbc62019-06-07 11:43:30 -0400181bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
182 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400183 // The src surface must be multisampled.
184 if (srcSampleCnt <= 1) {
185 return false;
186 }
187
188 // The dst must not be multisampled.
189 if (dstSampleCnt > 1) {
190 return false;
191 }
192
193 // Surfaces must have the same format.
194 if (dstConfig != srcConfig) {
195 return false;
196 }
197
Greg Daniela51e93c2019-03-25 12:30:45 -0400198 if (dstHasYcbcr || srcHasYcbcr) {
199 return false;
200 }
201
Greg Daniel25af6712018-04-25 10:44:38 -0400202 return true;
203}
204
Brian Salomonc67c31c2018-12-06 10:00:03 -0500205bool GrVkCaps::onCanCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
206 const SkIRect& srcRect, const SkIPoint& dstPoint) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400207 GrPixelConfig dstConfig = dst->config();
208 GrPixelConfig srcConfig = src->config();
209
210 // TODO: Figure out a way to track if we've wrapped a linear texture in a proxy (e.g.
211 // PromiseImage which won't get instantiated right away. Does this need a similar thing like the
212 // tracking of external or rectangle textures in GL? For now we don't create linear textures
213 // internally, and I don't believe anyone is wrapping them.
214 bool srcIsLinear = false;
215 bool dstIsLinear = false;
216
217 int dstSampleCnt = 0;
218 int srcSampleCnt = 0;
219 if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
Greg Danielbe7fc462019-01-03 16:40:42 -0500220 // Copying to or from render targets that wrap a secondary command buffer is not allowed
221 // since they would require us to know the VkImage, which we don't have, as well as need us
222 // to stop and start the VkRenderPass which we don't have access to.
223 if (rtProxy->wrapsVkSecondaryCB()) {
224 return false;
225 }
Greg Daniel25af6712018-04-25 10:44:38 -0400226 dstSampleCnt = rtProxy->numColorSamples();
227 }
228 if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
Greg Danielbe7fc462019-01-03 16:40:42 -0500229 // Copying to or from render targets that wrap a secondary command buffer is not allowed
230 // since they would require us to know the VkImage, which we don't have, as well as need us
231 // to stop and start the VkRenderPass which we don't have access to.
232 if (rtProxy->wrapsVkSecondaryCB()) {
233 return false;
234 }
Greg Daniel25af6712018-04-25 10:44:38 -0400235 srcSampleCnt = rtProxy->numColorSamples();
236 }
237 SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
238 SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));
239
Greg Daniela51e93c2019-03-25 12:30:45 -0400240 bool dstHasYcbcr = false;
241 if (auto ycbcr = dst->backendFormat().getVkYcbcrConversionInfo()) {
242 if (ycbcr->isValid()) {
243 dstHasYcbcr = true;
244 }
245 }
246
247 bool srcHasYcbcr = false;
248 if (auto ycbcr = src->backendFormat().getVkYcbcrConversionInfo()) {
249 if (ycbcr->isValid()) {
250 srcHasYcbcr = true;
251 }
252 }
253
Greg Daniel46cfbc62019-06-07 11:43:30 -0400254 return this->canCopyImage(dstConfig, dstSampleCnt, dstHasYcbcr,
255 srcConfig, srcSampleCnt, srcHasYcbcr) ||
Greg Daniela51e93c2019-03-25 12:30:45 -0400256 this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear, dstHasYcbcr,
257 srcConfig, srcSampleCnt, srcIsLinear, srcHasYcbcr) ||
Greg Daniel46cfbc62019-06-07 11:43:30 -0400258 this->canCopyAsResolve(dstConfig, dstSampleCnt, dstHasYcbcr,
259 srcConfig, srcSampleCnt, srcHasYcbcr);
Greg Daniel25af6712018-04-25 10:44:38 -0400260}
261
Greg Daniel7e000222018-12-03 10:08:21 -0500262template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
263 VkStructureType type) {
264 // All Vulkan structs that could be part of the features chain will start with the
265 // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
266 // so we can get access to the pNext for the next struct.
267 struct CommonVulkanHeader {
268 VkStructureType sType;
269 void* pNext;
270 };
271
272 void* pNext = features.pNext;
273 while (pNext) {
274 CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
275 if (header->sType == type) {
276 return static_cast<T*>(pNext);
277 }
278 pNext = header->pNext;
279 }
280 return nullptr;
281}
282
// Queries the physical device (properties, memory properties, feature chain, extensions) and
// derives every device-dependent cap. Called exactly once, from the constructor. Ordering
// matters: later capability checks (dedicated allocation, external memory, ycbcr) consume
// fSupports* flags set earlier in this function.
void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    uint32_t physicalDeviceVersion, const GrVkExtensions& extensions) {
    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));

    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));

    SkASSERT(physicalDeviceVersion <= properties.apiVersion);

    if (extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 1)) {
        fSupportsSwapchain = true;
    }

    // Each capability below is available either via the device being 1.1+ or via the
    // corresponding KHR extension.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }

    // Dedicated allocation additionally requires memory-requirements2 when enabled via
    // extension rather than core 1.1.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }

    // External memory stacks on physical-device-properties2 and dedicated allocation.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices are not supporting the QUEUE_FAMILY_FOREIGN_EXTENSION, so until they
    // do we don't explicitly require it here even the spec says it is required.
    if (extensions.hasExtension(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
        /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    // YCbCr conversion needs the feature bit from the chained features struct, AHardwareBuffer
    // external memory support, and either core 1.1 or the extension plus all its prerequisites.
    auto ycbcrFeatures =
            get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
    if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion &&
        fSupportsAndroidHWBExternalMemory &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
         (extensions.hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1) &&
          this->supportsMaintenance1() &&
          this->supportsBindMemory2() &&
          this->supportsMemoryRequirements2() &&
          this->supportsPhysicalDeviceProperties2()))) {
        fSupportsYcbcrConversion = true;
    }
    // We always push back the default GrVkYcbcrConversionInfo so that the case of no conversion
    // will return a key of 0.
    fYcbcrInfos.push_back(GrVkYcbcrConversionInfo());

    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // A "clear" load for the CCPR atlas runs faster on QC than a "discard" load followed by a
        // scissored clear.
        // On NVIDIA and Intel, the discard load followed by clear is faster.
        // TODO: Evaluate on ARM, Imagination, and ATI.
        fPreferFullscreenClears = true;
    }

    if (kQualcomm_VkVendor == properties.vendorID || kARM_VkVendor == properties.vendorID) {
        // On Qualcomm and ARM mapping a gpu buffer and doing both reads and writes to it is slow.
        // Thus for index and vertex buffers we will force to use a cpu side buffer and then copy
        // the whole buffer up to the gpu.
        fBufferMapThreshold = SK_MaxS32;
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // On Qualcomm it looks like using vkCmdUpdateBuffer is slower than using a transfer buffer
        // even for small sizes.
        fAvoidUpdateBuffers = true;
    }

    if (kARM_VkVendor == properties.vendorID) {
        // ARM seems to do better with more fine triangles as opposed to using the sample mask.
        // (At least in our current round rect op.)
        fPreferTrianglesOverSampleMask = true;
    }

    this->initFormatTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }

    // Options overrides are applied last so they can override anything derived above.
    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}
422
// Applies per-vendor workarounds for known driver bugs and performance cliffs. Only invoked
// when contextOptions.fDisableDriverCorrectnessWorkarounds is false (see init()).
void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
        // Transfer doesn't support this workaround.
        fTransferBufferSupport = false;
    }

    // NOTE(review): the sleep-on-teardown flag below presumably works around driver teardown
    // races on these platform/vendor combinations — confirm against the original bug reports.
#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif

    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // Current workaround is to use a different secondary command buffer for each new VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }

    // On Mali galaxy s7 we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
        fAvoidWritePixelsFastPath = true; // bugs.skia.org/8064
    }

    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only supports 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}
473
474int get_max_sample_count(VkSampleCountFlags flags) {
475 SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
476 if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
477 return 0;
478 }
479 if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
480 return 2;
481 }
482 if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
483 return 4;
484 }
485 if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
486 return 8;
487 }
488 if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
489 return 16;
490 }
491 if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
492 return 32;
493 }
494 return 64;
495}
496
// Derives the base-class GrCaps limits (attribute counts, max sizes, buffer-map behavior) from
// the device's limits, and probes the advanced-blend extension when available.
void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // So GPUs, like AMD, are reporting MAX_INT support vertex attributes. In general, there is no
    // need for us ever to support that amount, and it makes tests which tests all the vertex
    // attribs timeout looping over that many. For now, we'll cap this at 64 max and can raise it if
    // we ever find that need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);

    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);

    // TODO: check if RT's larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;

    // Assuming since we will always map in the end to upload the data we might as well just map
    // from the get go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;

    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag | kAsyncRead_MapFlag;

    fOversizedStencilSupport = true;

    // Advanced blend equations: query the EXT properties (via properties2) and only enable the
    // coherent flavor when the corresponding feature bit is present in the features chain.
    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {

        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;

        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;

        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));

        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;

            auto blendFeatures =
                get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non coherent blends are not supported in our vulkan backend. They
                // require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}
563
// Fills in GrShaderCaps: per-config texture/output swizzles plus the shader feature flags that
// are fixed for Vulkan or derived from device properties/features.
void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";


    // fConfigOutputSwizzle will default to RGBA so we only need to set it for alpha only config.
    for (int i = 0; i < kGrPixelConfigCnt; ++i) {
        GrPixelConfig config = static_cast<GrPixelConfig>(i);
        // Vulkan doesn't support a single channel format stored in alpha.
        if (GrPixelConfigIsAlphaOnly(config) &&
            kAlpha_8_as_Alpha_GrPixelConfig != config) {
            // Alpha-only data lives in the red channel; read R everywhere, write out to alpha.
            shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRR();
            shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::AAAA();
        } else {
            if (kGray_8_GrPixelConfig == config ||
                kGray_8_as_Red_GrPixelConfig == config) {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRA();
            } else if (kRGBA_4444_GrPixelConfig == config) {
                // The vulkan spec does not require R4G4B4A4 to be supported for texturing so we
                // store the data in a B4G4R4A4 texture and then swizzle it when doing texture reads
                // or writing to outputs. Since we're not actually changing the data at all, the
                // only extra work is the swizzle in the shader for all operations.
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::BGRA();
                shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::BGRA();
            } else if (kRGB_888X_GrPixelConfig == config) {
                // Force the unused X channel to read as opaque 1.
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RGB1();
            } else {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RGBA();
            }
        }
    }

    // Vulkan is based off ES 3.0 so the following should all be supported
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;

    // GrShaderCaps

    shaderCaps->fShaderDerivativeSupport = true;

    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    // shaderCaps->fGSInvocationsSupport = features.features.geometryShader;

    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;

    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;

    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;

    // Bounded by both sampled-image and sampler per-stage limits, clamped into int range.
    shaderCaps->fMaxFragmentSamplers = SkTMin(
        SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
               properties.limits.maxPerStageDescriptorSamplers),
        (uint32_t)INT_MAX);
}
627
egdaniel8f1dcaa2016-04-01 10:10:45 -0700628bool stencil_format_supported(const GrVkInterface* interface,
629 VkPhysicalDevice physDev,
630 VkFormat format) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500631 VkFormatProperties props;
632 memset(&props, 0, sizeof(VkFormatProperties));
633 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
egdaniel8f1dcaa2016-04-01 10:10:45 -0700634 return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
Greg Daniel164a9f02016-02-22 09:56:40 -0500635}
636
// Picks the preferred stencil format for this device and stores it in
// fPreferredStencilFormat.
void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
    // List of legal stencil formats (though perhaps not supported on
    // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
    // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
    // can optionally have 24 unused bits at the end so we assume the total bits is 64.
    static const StencilFormat
                              // internal Format                 stencil bits      total bits        packed?
        gS8    = { VK_FORMAT_S8_UINT,                      8,                 8,             false },
        gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT,            8,                32,              true },
        gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT,           8,                64,              true };

    // Probe in preference order; the assert documents the Vulkan guarantee that D32S8
    // is available whenever the first two are not.
    if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
        fPreferredStencilFormat = gS8;
    } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
        fPreferredStencilFormat = gD24S8;
    } else {
        SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
        fPreferredStencilFormat = gD32S8;
    }
}
657
// Returns true if |format| uses sRGB transfer encoding. Only formats that appear in
// kVkFormats may be passed in; any other format aborts. Keep the case list in sync
// with kVkFormats below.
static bool format_is_srgb(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_B8G8R8A8_SRGB:
            return true;
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SINT:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_R32G32B32A32_SFLOAT:
        case VK_FORMAT_R32G32_SFLOAT:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
        case VK_FORMAT_R16_UNORM:
        case VK_FORMAT_R16G16_UNORM:
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:
        case VK_FORMAT_R16G16_SFLOAT:
            return false;
        default:
            // A format outside the supported list is a programming error.
            SK_ABORT("Unsupported VkFormat");
            return false;
    }
}
689
// These are all the valid VkFormats that we support in Skia. They are roughly ordered from most
// frequently used to least to improve look up times in arrays.
// NOTE: the entry order is load-bearing — fFormatTable in getFormatInfo()/initFormatTable() is
// indexed in lockstep with this array, and its length must equal GrVkCaps::kNumVkFormats
// (static_asserted at each use site).
static constexpr VkFormat kVkFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_R8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
    VK_FORMAT_R5G6B5_UNORM_PACK16,
    VK_FORMAT_R16G16B16A16_SFLOAT,
    VK_FORMAT_R16_SFLOAT,
    VK_FORMAT_R8G8B8A8_SINT,
    VK_FORMAT_R8G8B8_UNORM,
    VK_FORMAT_R8G8_UNORM,
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    VK_FORMAT_R32G32B32A32_SFLOAT,
    VK_FORMAT_R32G32_SFLOAT,
    VK_FORMAT_R8G8B8A8_SRGB,
    VK_FORMAT_B8G8R8A8_SRGB,
    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    VK_FORMAT_R16_UNORM,
    VK_FORMAT_R16G16_UNORM,
    // Experimental (for Y416 and mutant P016/P010)
    VK_FORMAT_R16G16B16A16_UNORM,
    VK_FORMAT_R16G16_SFLOAT,
};
716
717const GrVkCaps::FormatInfo& GrVkCaps::getFormatInfo(VkFormat format) const {
718 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
719 "Size of VkFormats array must match static value in header");
720 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
721 if (kVkFormats[i] == format) {
722 return fFormatTable[i];
723 }
724 }
725 SK_ABORT("Invalid VkFormat");
726 static const FormatInfo kInvalidConfig;
727 return kInvalidConfig;
728}
729
730void GrVkCaps::initFormatTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000731 const VkPhysicalDeviceProperties& properties) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400732 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
733 "Size of VkFormats array must match static value in header");
734 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
735 VkFormat format = kVkFormats[i];
736 if (!format_is_srgb(format) || fSRGBSupport) {
737 fFormatTable[i].init(interface, physDev, properties, format);
egdaniel8f1dcaa2016-04-01 10:10:45 -0700738 }
739 }
740}
741
Greg Danielcaa795f2019-05-14 11:54:25 -0400742void GrVkCaps::FormatInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700743 if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
744 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
745 *flags = *flags | kTextureable_Flag;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700746
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400747 // Ganesh assumes that all renderable surfaces are also texturable
Greg Danielcaa795f2019-05-14 11:54:25 -0400748 if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400749 *flags = *flags | kRenderable_Flag;
750 }
egdaniel8f1dcaa2016-04-01 10:10:45 -0700751 }
752
753 if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
754 *flags = *flags | kBlitSrc_Flag;
755 }
756
757 if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
758 *flags = *flags | kBlitDst_Flag;
759 }
760}
761
Greg Danielcaa795f2019-05-14 11:54:25 -0400762void GrVkCaps::FormatInfo::initSampleCounts(const GrVkInterface* interface,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400763 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000764 const VkPhysicalDeviceProperties& physProps,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400765 VkFormat format) {
766 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
767 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
768 VK_IMAGE_USAGE_SAMPLED_BIT |
769 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
Greg Daniel81e7bf82017-07-19 14:47:42 -0400770 VkImageFormatProperties properties;
771 GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
772 format,
773 VK_IMAGE_TYPE_2D,
774 VK_IMAGE_TILING_OPTIMAL,
775 usage,
Brian Osman2b23c4b2018-06-01 12:25:08 -0400776 0, // createFlags
Greg Daniel81e7bf82017-07-19 14:47:42 -0400777 &properties));
778 VkSampleCountFlags flags = properties.sampleCounts;
779 if (flags & VK_SAMPLE_COUNT_1_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400780 fColorSampleCounts.push_back(1);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400781 }
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000782 if (kImagination_VkVendor == physProps.vendorID) {
783 // MSAA does not work on imagination
784 return;
785 }
Greg Daniel81e7bf82017-07-19 14:47:42 -0400786 if (flags & VK_SAMPLE_COUNT_2_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400787 fColorSampleCounts.push_back(2);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400788 }
789 if (flags & VK_SAMPLE_COUNT_4_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400790 fColorSampleCounts.push_back(4);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400791 }
792 if (flags & VK_SAMPLE_COUNT_8_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400793 fColorSampleCounts.push_back(8);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400794 }
795 if (flags & VK_SAMPLE_COUNT_16_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400796 fColorSampleCounts.push_back(16);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400797 }
798 if (flags & VK_SAMPLE_COUNT_32_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400799 fColorSampleCounts.push_back(32);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400800 }
801 if (flags & VK_SAMPLE_COUNT_64_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400802 fColorSampleCounts.push_back(64);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400803 }
804}
805
Greg Danielcaa795f2019-05-14 11:54:25 -0400806void GrVkCaps::FormatInfo::init(const GrVkInterface* interface,
egdaniel8f1dcaa2016-04-01 10:10:45 -0700807 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000808 const VkPhysicalDeviceProperties& properties,
Greg Danielcaa795f2019-05-14 11:54:25 -0400809 VkFormat format) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700810 VkFormatProperties props;
811 memset(&props, 0, sizeof(VkFormatProperties));
812 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
Greg Danielcaa795f2019-05-14 11:54:25 -0400813 InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
814 InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400815 if (fOptimalFlags & kRenderable_Flag) {
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000816 this->initSampleCounts(interface, physDev, properties, format);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400817 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500818}
Greg Daniel81e7bf82017-07-19 14:47:42 -0400819
Robert Phillips39ef2ef2019-05-15 08:45:53 -0400820bool GrVkCaps::isFormatTexturable(VkFormat format) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400821 if (!GrVkFormatIsSupported(format)) {
822 return false;
823 }
824
825 const FormatInfo& info = this->getFormatInfo(format);
826 return SkToBool(FormatInfo::kTextureable_Flag & info.fOptimalFlags);
827}
828
829bool GrVkCaps::isConfigTexturable(GrPixelConfig config) const {
830 VkFormat format;
831 if (!GrPixelConfigToVkFormat(config, &format)) {
832 return false;
833 }
Robert Phillips39ef2ef2019-05-15 08:45:53 -0400834 return this->isFormatTexturable(format);
835}
836
837bool GrVkCaps::isFormatRenderable(VkFormat format) const {
838 return this->maxRenderTargetSampleCount(format) > 0;
Greg Danielcaa795f2019-05-14 11:54:25 -0400839}
840
Brian Salomonbdecacf2018-02-02 20:32:49 -0500841int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400842 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
843 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
844 if (config == kRGB_888X_GrPixelConfig) {
845 return 0;
846 }
847
848 VkFormat format;
849 if (!GrPixelConfigToVkFormat(config, &format)) {
850 return 0;
851 }
852
853 return this->getRenderTargetSampleCount(requestedCount, format);
854}
855
856int GrVkCaps::getRenderTargetSampleCount(int requestedCount, VkFormat format) const {
Brian Salomonbdecacf2018-02-02 20:32:49 -0500857 requestedCount = SkTMax(1, requestedCount);
Greg Danielcaa795f2019-05-14 11:54:25 -0400858
859 const FormatInfo& info = this->getFormatInfo(format);
860
861 int count = info.fColorSampleCounts.count();
Brian Salomonbdecacf2018-02-02 20:32:49 -0500862
863 if (!count) {
Greg Daniel81e7bf82017-07-19 14:47:42 -0400864 return 0;
865 }
866
Brian Salomonbdecacf2018-02-02 20:32:49 -0500867 if (1 == requestedCount) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400868 SkASSERT(info.fColorSampleCounts.count() && info.fColorSampleCounts[0] == 1);
Brian Salomonbdecacf2018-02-02 20:32:49 -0500869 return 1;
870 }
871
Greg Daniel81e7bf82017-07-19 14:47:42 -0400872 for (int i = 0; i < count; ++i) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400873 if (info.fColorSampleCounts[i] >= requestedCount) {
874 return info.fColorSampleCounts[i];
Greg Daniel81e7bf82017-07-19 14:47:42 -0400875 }
876 }
Brian Salomonbdecacf2018-02-02 20:32:49 -0500877 return 0;
878}
879
880int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400881 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
882 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
883 if (config == kRGB_888X_GrPixelConfig) {
884 return 0;
885 }
886
887 VkFormat format;
888 if (!GrPixelConfigToVkFormat(config, &format)) {
889 return 0;
890 }
891 return this->maxRenderTargetSampleCount(format);
892}
893
894int GrVkCaps::maxRenderTargetSampleCount(VkFormat format) const {
895 const FormatInfo& info = this->getFormatInfo(format);
896
897 const auto& table = info.fColorSampleCounts;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500898 if (!table.count()) {
899 return 0;
900 }
901 return table[table.count() - 1];
Brian Salomond653cac2018-02-01 13:58:00 -0500902}
903
Greg Daniela51e93c2019-03-25 12:30:45 -0400904bool GrVkCaps::surfaceSupportsReadPixels(const GrSurface* surface) const {
905 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
906 // We can't directly read from a VkImage that has a ycbcr sampler.
907 if (tex->ycbcrConversionInfo().isValid()) {
908 return false;
909 }
910 }
911 return true;
912}
913
Brian Salomonc67c31c2018-12-06 10:00:03 -0500914bool GrVkCaps::onSurfaceSupportsWritePixels(const GrSurface* surface) const {
Brian Salomon3d86a192018-02-27 16:46:11 -0500915 if (auto rt = surface->asRenderTarget()) {
916 return rt->numColorSamples() <= 1 && SkToBool(surface->asTexture());
917 }
Greg Daniela51e93c2019-03-25 12:30:45 -0400918 // We can't write to a texture that has a ycbcr sampler.
919 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
920 // We can't directly read from a VkImage that has a ycbcr sampler.
921 if (tex->ycbcrConversionInfo().isValid()) {
922 return false;
923 }
924 }
Brian Salomon3d86a192018-02-27 16:46:11 -0500925 return true;
926}
927
// Maps a (VkFormat, SkColorType) pair to the GrPixelConfig Skia uses for it, or
// kUnknown_GrPixelConfig when the pair is not a legal combination.
// |hasYcbcrConversion| is only legal together with VK_FORMAT_UNDEFINED (external images).
static GrPixelConfig validate_image_info(VkFormat format, SkColorType ct, bool hasYcbcrConversion) {
    if (format == VK_FORMAT_UNDEFINED) {
        // If the format is undefined then it is only valid as an external image which requires that
        // we have a valid VkYcbcrConversion.
        if (hasYcbcrConversion) {
            // We don't actually care what the color type or config are since we won't use those
            // values for external textures. However, for read pixels we will draw to a non ycbcr
            // texture of this config so we set RGBA here for that.
            return kRGBA_8888_GrPixelConfig;
        } else {
            return kUnknown_GrPixelConfig;
        }
    }

    if (hasYcbcrConversion) {
        // We only support having a ycbcr conversion for external images.
        return kUnknown_GrPixelConfig;
    }

    // For each color type, accept only the VkFormat(s) Skia pairs with it; every
    // unmatched combination falls through to kUnknown below.
    switch (ct) {
        case kUnknown_SkColorType:
            break;
        case kAlpha_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGB_565_SkColorType:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                return kRGB_565_GrPixelConfig;
            }
            break;
        case kARGB_4444_SkColorType:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format ||
                VK_FORMAT_R4G4B4A4_UNORM_PACK16 == format) {
                return kRGBA_4444_GrPixelConfig;
            }
            break;
        case kRGBA_8888_SkColorType:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGBA_8888_GrPixelConfig;
            } else if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                return kSRGBA_8888_GrPixelConfig;
            }
            break;
        case kRGB_888x_SkColorType:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                return kRGB_888_GrPixelConfig;
            }
            // RGBA storage with ignored alpha.
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGB_888X_GrPixelConfig;
            }
            break;
        case kBGRA_8888_SkColorType:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                return kBGRA_8888_GrPixelConfig;
            } else if (VK_FORMAT_B8G8R8A8_SRGB == format) {
                return kSBGRA_8888_GrPixelConfig;
            }
            break;
        case kRGBA_1010102_SkColorType:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                return kRGBA_1010102_GrPixelConfig;
            }
            break;
        case kRGB_101010x_SkColorType:
            // No supported Vulkan pairing for this color type.
            return kUnknown_GrPixelConfig;
        case kGray_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGBA_F16Norm_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_Clamped_GrPixelConfig;
            }
            break;
        case kRGBA_F16_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_GrPixelConfig;
            }
            break;
        case kRGBA_F32_SkColorType:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                return kRGBA_float_GrPixelConfig;
            }
            break;
    }

    return kUnknown_GrPixelConfig;
}
1019
Brian Salomonf391d0f2018-12-14 09:18:50 -05001020GrPixelConfig GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt,
1021 SkColorType ct) const {
Greg Daniel323fbcf2018-04-10 13:46:30 -04001022 GrVkImageInfo imageInfo;
1023 if (!rt.getVkImageInfo(&imageInfo)) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001024 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001025 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001026 return validate_image_info(imageInfo.fFormat, ct, imageInfo.fYcbcrConversionInfo.isValid());
Robert Phillipsfc711a22018-02-13 17:03:00 -05001027}
1028
Brian Salomonf391d0f2018-12-14 09:18:50 -05001029GrPixelConfig GrVkCaps::getConfigFromBackendFormat(const GrBackendFormat& format,
1030 SkColorType ct) const {
Robert Phillipsfc711a22018-02-13 17:03:00 -05001031 const VkFormat* vkFormat = format.getVkFormat();
Greg Daniel14c55c22018-12-04 11:25:03 -05001032 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
1033 if (!vkFormat || !ycbcrInfo) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001034 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001035 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001036 return validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
Greg Danielfaa095e2017-12-19 13:15:02 -05001037}
Greg Danielf5d87582017-12-18 14:48:15 -05001038
// Maps a VkFormat to the GrPixelConfig used when the format backs a YUVA plane.
// Returns kUnknown_GrPixelConfig for formats not usable as YUVA planes.
static GrPixelConfig get_yuva_config(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_as_Red_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8_UNORM:
            return kRGB_888_GrPixelConfig;
        case VK_FORMAT_R8G8_UNORM:
            return kRG_88_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        case VK_FORMAT_R16_UNORM:
            return kR_16_GrPixelConfig;
        case VK_FORMAT_R16G16_UNORM:
            return kRG_1616_GrPixelConfig;
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:
            return kRGBA_16161616_GrPixelConfig;
        case VK_FORMAT_R16G16_SFLOAT:
            return kRG_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}
1066
Brian Salomonf391d0f2018-12-14 09:18:50 -05001067GrPixelConfig GrVkCaps::getYUVAConfigFromBackendFormat(const GrBackendFormat& format) const {
Jim Van Verth9bf81202018-10-30 15:53:36 -04001068 const VkFormat* vkFormat = format.getVkFormat();
1069 if (!vkFormat) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001070 return kUnknown_GrPixelConfig;
Jim Van Verth9bf81202018-10-30 15:53:36 -04001071 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001072 return get_yuva_config(*vkFormat);
Timothy Liang036fdfe2018-06-28 15:50:36 -04001073}
Greg Daniel4065d452018-11-16 15:43:41 -05001074
1075GrBackendFormat GrVkCaps::getBackendFormatFromGrColorType(GrColorType ct,
1076 GrSRGBEncoded srgbEncoded) const {
1077 GrPixelConfig config = GrColorTypeToPixelConfig(ct, srgbEncoded);
1078 if (config == kUnknown_GrPixelConfig) {
1079 return GrBackendFormat();
1080 }
1081 VkFormat format;
1082 if (!GrPixelConfigToVkFormat(config, &format)) {
1083 return GrBackendFormat();
1084 }
1085 return GrBackendFormat::MakeVk(format);
1086}
Timothy Liang036fdfe2018-06-28 15:50:36 -04001087
#ifdef SK_DEBUG
// Debug-only helper: returns true if |vkFormat| is one of the VkFormats Skia pairs
// with |colorType|. Used to SkASSERT format/color-type agreement in get_swizzle().
static bool format_color_type_valid_pair(VkFormat vkFormat, GrColorType colorType) {
    switch (colorType) {
        case GrColorType::kUnknown:
            return false;
        case GrColorType::kAlpha_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kBGR_565:
            return VK_FORMAT_R5G6B5_UNORM_PACK16 == vkFormat;
        case GrColorType::kABGR_4444:
            return VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat ||
                   VK_FORMAT_R4G4B4A4_UNORM_PACK16 == vkFormat;
        case GrColorType::kRGBA_8888:
            return VK_FORMAT_R8G8B8A8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_SRGB == vkFormat;
        case GrColorType::kRGB_888x:
            return VK_FORMAT_R8G8B8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_UNORM == vkFormat;
        case GrColorType::kRG_88:
            return VK_FORMAT_R8G8_UNORM == vkFormat;
        case GrColorType::kBGRA_8888:
            return VK_FORMAT_B8G8R8A8_UNORM == vkFormat || VK_FORMAT_B8G8R8A8_SRGB == vkFormat;
        case GrColorType::kRGBA_1010102:
            return VK_FORMAT_A2B10G10R10_UNORM_PACK32 == vkFormat;
        case GrColorType::kGray_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kAlpha_F16:
            return VK_FORMAT_R16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16_Clamped:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRG_F32:
            return VK_FORMAT_R32G32_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F32:
            return VK_FORMAT_R32G32B32A32_SFLOAT == vkFormat;
        case GrColorType::kRGB_ETC1:
            return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK == vkFormat;
        case GrColorType::kR_16:
            return VK_FORMAT_R16_UNORM == vkFormat;
        case GrColorType::kRG_1616:
            return VK_FORMAT_R16G16_UNORM == vkFormat;
        // Experimental (for Y416 and mutant P016/P010)
        case GrColorType::kRGBA_16161616:
            return VK_FORMAT_R16G16B16A16_UNORM == vkFormat;
        case GrColorType::kRG_half:
            return VK_FORMAT_R16G16_SFLOAT == vkFormat;
    }
    // Unreachable for valid enum values; guards against corrupt input.
    SK_ABORT("Unknown color type");
    return false;
}
#endif
1138
1139static GrSwizzle get_swizzle(const GrBackendFormat& format, GrColorType colorType,
1140 bool forOutput) {
1141 SkASSERT(format.getVkFormat());
1142 VkFormat vkFormat = *format.getVkFormat();
1143
1144 SkASSERT(format_color_type_valid_pair(vkFormat, colorType));
1145
1146 switch (colorType) {
1147 case GrColorType::kAlpha_8: // fall through
1148 case GrColorType::kAlpha_F16:
1149 if (forOutput) {
1150 return GrSwizzle::AAAA();
1151 } else {
1152 return GrSwizzle::RRRR();
1153 }
1154 case GrColorType::kGray_8:
1155 if (!forOutput) {
1156 return GrSwizzle::RRRA();
1157 }
1158 break;
1159 case GrColorType::kABGR_4444:
1160 if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat) {
1161 return GrSwizzle::BGRA();
1162 }
1163 break;
1164 case GrColorType::kRGB_888x:
1165 if (!forOutput) {
1166 return GrSwizzle::RGB1();
1167 }
1168 default:
1169 return GrSwizzle::RGBA();
1170 }
1171 return GrSwizzle::RGBA();
1172}
1173
// Read-side swizzle: maps the backing format's channels onto the color type's channels.
GrSwizzle GrVkCaps::getTextureSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
    return get_swizzle(format, colorType, false);
}
// Write-side swizzle: maps the color type's channels onto the backing format's channels.
GrSwizzle GrVkCaps::getOutputSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
    return get_swizzle(format, colorType, true);
}
1180
Brian Salomon26de56e2019-04-10 12:14:26 -04001181size_t GrVkCaps::onTransferFromOffsetAlignment(GrColorType bufferColorType) const {
Brian Salomona585fe92019-04-09 14:57:00 -04001182 // This GrColorType has 32 bpp but the Vulkan pixel format we use for with may have 24bpp
1183 // (VK_FORMAT_R8G8B8_...) or may be 32 bpp. We don't support post transforming the pixel data
1184 // for transfer-from currently and don't want to have to pass info about the src surface here.
1185 if (bufferColorType == GrColorType::kRGB_888x) {
1186 return false;
1187 }
1188 size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
1189 // The VkBufferImageCopy bufferOffset field must be both a multiple of 4 and of a single texel.
1190 switch (bpp & 0b11) {
Brian Salomon26de56e2019-04-10 12:14:26 -04001191 // bpp is already a multiple of 4.
1192 case 0: return bpp;
1193 // bpp is a multiple of 2 but not 4.
1194 case 2: return 2 * bpp;
1195 // bpp is not a multiple of 2.
1196 default: return 4 * bpp;
Brian Salomona585fe92019-04-09 14:57:00 -04001197 }
Brian Salomona585fe92019-04-09 14:57:00 -04001198}