blob: c60b9185031202b3af53794286ff19a9486773f6 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/gpu/GrBackendSurface.h"
9#include "include/gpu/GrRenderTarget.h"
10#include "include/gpu/vk/GrVkBackendContext.h"
11#include "include/gpu/vk/GrVkExtensions.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrShaderCaps.h"
14#include "src/gpu/SkGr.h"
15#include "src/gpu/vk/GrVkCaps.h"
16#include "src/gpu/vk/GrVkInterface.h"
17#include "src/gpu/vk/GrVkTexture.h"
18#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019
Emircan Uysaler23ca4e72019-06-24 10:53:09 -040020#ifdef SK_BUILD_FOR_ANDROID
21#include <sys/system_properties.h>
22#endif
23
// Constructor: seeds the GrCaps fields with conservative, spec-guaranteed Vulkan defaults, then
// defers all device/extension-specific queries to init().
// NOTE(review): instanceVersion is accepted but not referenced in this constructor body.
GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, uint32_t physicalDeviceVersion,
                   const GrVkExtensions& extensions, GrProtected isProtected)
        : INHERITED(contextOptions) {
    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;   // always available in Vulkan
    fSRGBSupport = true;   // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fDiscardRenderTargetSupport = true;
    fReuseScratchTextures = true; //TODO: figure this out
    fGpuTracingSupport = false; //TODO: figure this out
    fOversizedStencilSupport = false; //TODO: figure this out (initGrCaps() later sets it true)
    fInstanceAttribSupport = true;

    fSemaphoreSupport = true;   // always available in Vulkan
    fFenceSyncSupport = true;   // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;

    // We always copy in/out of a transfer buffer so it's trivial to support row bytes.
    fReadPixelsRowBytesSupport = true;
    fWritePixelsRowBytesSupport = true;

    fTransferBufferSupport = true;

    // Conservative floors; init()/initGrCaps() raise these from the device limits.
    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096; // minimum required by spec

    fDynamicStateArrayGeometryProcessorTextureSupport = true;

    fShaderCaps.reset(new GrShaderCaps(contextOptions));

    // Query the actual device capabilities (properties, extensions, features).
    this->init(contextOptions, vkInterface, physDev, features, physicalDeviceVersion, extensions,
               isProtected);
}
62
Robert Phillipsbf25d432017-04-07 10:08:53 -040063bool GrVkCaps::initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
Greg Daniel46cfbc62019-06-07 11:43:30 -040064 bool* rectsMustMatch, bool* disallowSubrect) const {
Eric Karl74480882017-04-03 14:49:05 -070065 // Vk doesn't use rectsMustMatch or disallowSubrect. Always return false.
66 *rectsMustMatch = false;
67 *disallowSubrect = false;
68
Brian Salomon467921e2017-03-06 16:17:12 -050069 // We can always succeed here with either a CopyImage (none msaa src) or ResolveImage (msaa).
70 // For CopyImage we can make a simple texture, for ResolveImage we require the dst to be a
71 // render target as well.
Brian Salomon467921e2017-03-06 16:17:12 -050072 desc->fConfig = src->config();
Chris Dalton6ce447a2019-06-23 18:07:38 -060073 if (src->numSamples() > 1 || src->asTextureProxy()) {
Brian Salomon467921e2017-03-06 16:17:12 -050074 desc->fFlags = kRenderTarget_GrSurfaceFlag;
75 } else {
76 // Just going to use CopyImage here
77 desc->fFlags = kNone_GrSurfaceFlags;
78 }
79
80 return true;
81}
82
// Maps a GrPixelConfig to an arbitrary "format class" index. canCopyImage() only allows
// vkCmdCopyImage between two configs that land in the same class. The grouping mirrors the
// Vulkan spec's format compatibility classes, which bucket formats by texel block size
// (here: 1, 2, 3, 4, 8, 16 bytes and ETC1 compressed) -- confirm against the spec table
// before adding new configs.
static int get_compatible_format_class(GrPixelConfig config) {
    switch (config) {
        case kAlpha_8_GrPixelConfig:
        case kAlpha_8_as_Red_GrPixelConfig:
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            return 1;   // 8-bit texels
        case kRGB_565_GrPixelConfig:
        case kRGBA_4444_GrPixelConfig:
        case kRG_88_GrPixelConfig:
        case kAlpha_half_GrPixelConfig:
        case kAlpha_half_as_Red_GrPixelConfig:
        case kR_16_GrPixelConfig:
            return 2;   // 16-bit texels
        case kRGB_888_GrPixelConfig:
            return 3;   // 24-bit texels
        case kRGBA_8888_GrPixelConfig:
        case kRGB_888X_GrPixelConfig:
        case kBGRA_8888_GrPixelConfig:
        case kSRGBA_8888_GrPixelConfig:
        case kRGBA_1010102_GrPixelConfig:
        case kRG_1616_GrPixelConfig:
            return 4;   // 32-bit texels
        case kRGBA_half_GrPixelConfig:
        case kRGBA_half_Clamped_GrPixelConfig:
        case kRG_float_GrPixelConfig:
            return 5;   // 64-bit texels
        case kRGBA_float_GrPixelConfig:
            return 6;   // 128-bit texels
        case kRGB_ETC1_GrPixelConfig:
            return 7;   // compressed; never class-compatible with uncompressed formats
        case kUnknown_GrPixelConfig:
        case kAlpha_8_as_Alpha_GrPixelConfig:
        case kGray_8_as_Lum_GrPixelConfig:
            // These configs have no Vulkan representation; reaching here is a programming error.
            SK_ABORT("Unsupported Vulkan pixel config");
            return 0;

        // Experimental (for Y416 and mutant P016/P010)
        case kRGBA_16161616_GrPixelConfig:
            return 8;   // 64-bit texels, kept distinct from class 5 above
        case kRG_half_GrPixelConfig:
            return 4;   // 32-bit texels
    }
    SK_ABORT("Invalid pixel config");
    return 0;
}
129
Greg Daniel46cfbc62019-06-07 11:43:30 -0400130bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
131 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400132 if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
133 return false;
134 }
135
Greg Daniela51e93c2019-03-25 12:30:45 -0400136 if (dstHasYcbcr || srcHasYcbcr) {
137 return false;
138 }
139
Greg Daniel25af6712018-04-25 10:44:38 -0400140 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
141 // as image usage flags.
Greg Daniel46cfbc62019-06-07 11:43:30 -0400142 if (get_compatible_format_class(srcConfig) != get_compatible_format_class(dstConfig)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400143 return false;
144 }
145
Greg Daniel25af6712018-04-25 10:44:38 -0400146 return true;
147}
148
149bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
Greg Daniela51e93c2019-03-25 12:30:45 -0400150 bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
151 bool srcIsLinear, bool srcHasYcbcr) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400152
153 VkFormat dstFormat;
154 SkAssertResult(GrPixelConfigToVkFormat(dstConfig, &dstFormat));
155 VkFormat srcFormat;
156 SkAssertResult(GrPixelConfigToVkFormat(srcConfig, &srcFormat));
Greg Daniel25af6712018-04-25 10:44:38 -0400157 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
158 // as image usage flags.
Greg Danielcaa795f2019-05-14 11:54:25 -0400159 if (!this->formatCanBeDstofBlit(dstFormat, dstIsLinear) ||
160 !this->formatCanBeSrcofBlit(srcFormat, srcIsLinear)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400161 return false;
162 }
163
Greg Daniel25af6712018-04-25 10:44:38 -0400164 // We cannot blit images that are multisampled. Will need to figure out if we can blit the
165 // resolved msaa though.
166 if (dstSampleCnt > 1 || srcSampleCnt > 1) {
167 return false;
168 }
169
Greg Daniela51e93c2019-03-25 12:30:45 -0400170 if (dstHasYcbcr || srcHasYcbcr) {
171 return false;
172 }
173
Greg Daniel25af6712018-04-25 10:44:38 -0400174 return true;
175}
176
Greg Daniel46cfbc62019-06-07 11:43:30 -0400177bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
178 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400179 // The src surface must be multisampled.
180 if (srcSampleCnt <= 1) {
181 return false;
182 }
183
184 // The dst must not be multisampled.
185 if (dstSampleCnt > 1) {
186 return false;
187 }
188
189 // Surfaces must have the same format.
190 if (dstConfig != srcConfig) {
191 return false;
192 }
193
Greg Daniela51e93c2019-03-25 12:30:45 -0400194 if (dstHasYcbcr || srcHasYcbcr) {
195 return false;
196 }
197
Greg Daniel25af6712018-04-25 10:44:38 -0400198 return true;
199}
200
// GrCaps hook: decides whether |src| can be copied into |dst| by any of the three Vulkan copy
// paths (CopyImage, BlitImage, ResolveImage). srcRect/dstPoint are ignored -- none of the
// Vulkan paths used here has geometry restrictions.
bool GrVkCaps::onCanCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
                                const SkIRect& srcRect, const SkIPoint& dstPoint) const {
    // Copying protected content into an unprotected surface is disallowed.
    if (src->isProtected() && !dst->isProtected()) {
        return false;
    }

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    // TODO: Figure out a way to track if we've wrapped a linear texture in a proxy (e.g.
    // PromiseImage which won't get instantiated right away. Does this need a similar thing like the
    // tracking of external or rectangle textures in GL? For now we don't create linear textures
    // internally, and I don't believe anyone is wrapping them.
    bool srcIsLinear = false;
    bool dstIsLinear = false;

    // 0 means "not a render target"; render targets report their actual sample count.
    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        dstSampleCnt = rtProxy->numSamples();
    }
    if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        srcSampleCnt = rtProxy->numSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));

    // A surface counts as Ycbcr only if its backend format carries a *valid* conversion info.
    bool dstHasYcbcr = false;
    if (auto ycbcr = dst->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            dstHasYcbcr = true;
        }
    }

    bool srcHasYcbcr = false;
    if (auto ycbcr = src->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            srcHasYcbcr = true;
        }
    }

    // Succeed if any one of the three copy mechanisms can handle this pair.
    return this->canCopyImage(dstConfig, dstSampleCnt, dstHasYcbcr,
                              srcConfig, srcSampleCnt, srcHasYcbcr) ||
           this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear, dstHasYcbcr,
                               srcConfig, srcSampleCnt, srcIsLinear, srcHasYcbcr) ||
           this->canCopyAsResolve(dstConfig, dstSampleCnt, dstHasYcbcr,
                                  srcConfig, srcSampleCnt, srcHasYcbcr);
}
261
Greg Daniel7e000222018-12-03 10:08:21 -0500262template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
263 VkStructureType type) {
264 // All Vulkan structs that could be part of the features chain will start with the
265 // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
266 // so we can get access to the pNext for the next struct.
267 struct CommonVulkanHeader {
268 VkStructureType sType;
269 void* pNext;
270 };
271
272 void* pNext = features.pNext;
273 while (pNext) {
274 CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
275 if (header->sType == type) {
276 return static_cast<T*>(pNext);
277 }
278 pNext = header->pNext;
279 }
280 return nullptr;
281}
282
// Queries the physical device and extension list to fill in every capability flag, then applies
// driver-correctness workarounds and context-option overrides. Ordering matters: several later
// checks (e.g. dedicated allocation, external memory, Ycbcr) depend on flags set earlier.
void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    uint32_t physicalDeviceVersion, const GrVkExtensions& extensions,
                    GrProtected isProtected) {
    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));

    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));

    SkASSERT(physicalDeviceVersion <= properties.apiVersion);

    if (extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 1)) {
        fSupportsSwapchain = true;
    }

    // Each of the following features was promoted to core in Vulkan 1.1, so accept either the
    // device version or the corresponding KHR extension.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }

    // Dedicated allocation additionally requires memory-requirements-2 support.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }

    // External memory requires the capability + memory extensions plus the two supports above.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices are not supporting the QUEUE_FAMILY_FOREIGN_EXTENSION, so until they
    // do we don't explicitly require it here even the spec says it is required.
    if (extensions.hasExtension(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
       /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    // Ycbcr conversion: needs the feature struct in the pNext chain, AHardwareBuffer external
    // memory (above), and either Vulkan 1.1 or the full set of prerequisite extensions.
    auto ycbcrFeatures =
            get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
    if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion &&
        fSupportsAndroidHWBExternalMemory &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
         (extensions.hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1) &&
          this->supportsMaintenance1() &&
          this->supportsBindMemory2() &&
          this->supportsMemoryRequirements2() &&
          this->supportsPhysicalDeviceProperties2()))) {
        fSupportsYcbcrConversion = true;
    }
    // We always push back the default GrVkYcbcrConversionInfo so that the case of no conversion
    // will return a key of 0.
    fYcbcrInfos.push_back(GrVkYcbcrConversionInfo());

    // Protected memory needs Vulkan 1.1; it also forces more conservative buffer/image behavior.
    if ((isProtected == GrProtected::kYes) &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0))) {
        fSupportsProtectedMemory = true;
        fAvoidUpdateBuffers = true;
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // A "clear" load for the CCPR atlas runs faster on QC than a "discard" load followed by a
        // scissored clear.
        // On NVIDIA and Intel, the discard load followed by clear is faster.
        // TODO: Evaluate on ARM, Imagination, and ATI.
        fPreferFullscreenClears = true;
    }

    if (kQualcomm_VkVendor == properties.vendorID || kARM_VkVendor == properties.vendorID) {
        // On Qualcomm and ARM mapping a gpu buffer and doing both reads and writes to it is slow.
        // Thus for index and vertex buffers we will force to use a cpu side buffer and then copy
        // the whole buffer up to the gpu.
        fBufferMapThreshold = SK_MaxS32;
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // On Qualcomm it looks like using vkCmdUpdateBuffer is slower than using a transfer buffer
        // even for small sizes.
        fAvoidUpdateBuffers = true;
    }

    if (kARM_VkVendor == properties.vendorID) {
        // ARM seems to do better with more fine triangles as opposed to using the sample mask.
        // (At least in our current round rect op.)
        fPreferTrianglesOverSampleMask = true;
    }

    this->initFormatTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }

    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}
430
// Applies per-vendor (and per-platform) workarounds for known driver bugs. Called from init()
// unless GrContextOptions::fDisableDriverCorrectnessWorkarounds is set.
void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
        // Transfer doesn't support this workaround.
        fTransferBufferSupport = false;
    }

#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif

#if defined(SK_BUILD_FOR_ANDROID)
    // Protected memory features have problems in Android P and earlier.
    // (API level 28 == Android P; read the level from the system property.)
    if (fSupportsProtectedMemory && (kQualcomm_VkVendor == properties.vendorID)) {
        char androidAPIVersion[PROP_VALUE_MAX];
        int strLength = __system_property_get("ro.build.version.sdk", androidAPIVersion);
        if (strLength == 0 || atoi(androidAPIVersion) <= 28) {
            fSupportsProtectedMemory = false;
        }
    }
#endif

    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // Current workaround is to use a different secondary command buffer for each new VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }

    // On Mali galaxy s7 we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
        fAvoidWritePixelsFastPath = true; // bugs.skia.org/8064
    }

    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only supports 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}
492
493int get_max_sample_count(VkSampleCountFlags flags) {
494 SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
495 if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
496 return 0;
497 }
498 if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
499 return 2;
500 }
501 if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
502 return 4;
503 }
504 if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
505 return 8;
506 }
507 if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
508 return 16;
509 }
510 if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
511 return 32;
512 }
513 return 64;
514}
515
// Fills in the base-class GrCaps fields from the device's limits, and probes the advanced-blend
// extension via a VkPhysicalDeviceProperties2 pNext chain.
void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // So GPUs, like AMD, are reporting MAX_INT support vertex attributes. In general, there is no
    // need for us ever to support that amount, and it makes tests which tests all the vertex
    // attribs timeout looping over that many. For now, we'll cap this at 64 max and can raise it if
    // we ever find that need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);

    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);

    // TODO: check if RT's larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;

    // Assuming since we will always map in the end to upload the data we might as well just map
    // from the get go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;

    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag | kAsyncRead_MapFlag;

    fOversizedStencilSupport = true;

    // Advanced blend equations: requires the EXT extension (spec version >= 2) plus
    // physical-device-properties-2 so we can chain the query struct below.
    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {

        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;

        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;

        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));

        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;

            // Coherent advanced blends are usable today; non-coherent ones are not (see TODO).
            auto blendFeatures =
                get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                        features,
                        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non coherent blends are not supported in our vulkan backend. They
                // require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}
582
// Fills in the GrShaderCaps from device properties and the queried feature set.
void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";

    // Vulkan is based off ES 3.0 so the following should all be supported
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;

    // GrShaderCaps

    shaderCaps->fShaderDerivativeSupport = true;

    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    //         shaderCaps->fGSInvocationsSupport = features.features.geometryShader;

    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;

    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;

    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;

    // Bounded by both the sampled-image and sampler per-stage descriptor limits.
    shaderCaps->fMaxFragmentSamplers = SkTMin(
            SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
                   properties.limits.maxPerStageDescriptorSamplers),
            (uint32_t)INT_MAX);
}
618
egdaniel8f1dcaa2016-04-01 10:10:45 -0700619bool stencil_format_supported(const GrVkInterface* interface,
620 VkPhysicalDevice physDev,
621 VkFormat format) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500622 VkFormatProperties props;
623 memset(&props, 0, sizeof(VkFormatProperties));
624 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
egdaniel8f1dcaa2016-04-01 10:10:45 -0700625 return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
Greg Daniel164a9f02016-02-22 09:56:40 -0500626}
627
egdaniel8f1dcaa2016-04-01 10:10:45 -0700628void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
629 // List of legal stencil formats (though perhaps not supported on
630 // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
jvanvertha4b0fed2016-04-27 11:42:21 -0700631 // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
egdaniel8f1dcaa2016-04-01 10:10:45 -0700632 // can optionally have 24 unused bits at the end so we assume the total bits is 64.
Greg Daniel164a9f02016-02-22 09:56:40 -0500633 static const StencilFormat
634 // internal Format stencil bits total bits packed?
635 gS8 = { VK_FORMAT_S8_UINT, 8, 8, false },
egdaniel8f1dcaa2016-04-01 10:10:45 -0700636 gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT, 8, 32, true },
637 gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT, 8, 64, true };
Greg Daniel164a9f02016-02-22 09:56:40 -0500638
egdaniel8f1dcaa2016-04-01 10:10:45 -0700639 if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400640 fPreferredStencilFormat = gS8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700641 } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400642 fPreferredStencilFormat = gD24S8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700643 } else {
644 SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400645 fPreferredStencilFormat = gD32S8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700646 }
647}
648
Greg Danielcaa795f2019-05-14 11:54:25 -0400649static bool format_is_srgb(VkFormat format) {
Robert Phillipsf209e882019-06-25 15:59:50 -0400650 SkASSERT(GrVkFormatIsSupported(format));
651
Greg Danielcaa795f2019-05-14 11:54:25 -0400652 switch (format) {
653 case VK_FORMAT_R8G8B8A8_SRGB:
654 case VK_FORMAT_B8G8R8A8_SRGB:
655 return true;
Greg Danielcaa795f2019-05-14 11:54:25 -0400656 default:
Greg Danielcaa795f2019-05-14 11:54:25 -0400657 return false;
658 }
659}
660
// These are all the valid VkFormats that we support in Skia. They are roughly ordered from most
// frequently used to least to improve look up times in arrays.
// NOTE: entry i of this array corresponds to fFormatTable[i]; the array length must equal
// GrVkCaps::kNumVkFormats (static_asserted at each use site). Lookup is a linear scan
// (see GrVkCaps::getFormatInfo), so ordering only affects lookup speed, not correctness.
static constexpr VkFormat kVkFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_R8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
    VK_FORMAT_R5G6B5_UNORM_PACK16,
    VK_FORMAT_R16G16B16A16_SFLOAT,
    VK_FORMAT_R16_SFLOAT,
    VK_FORMAT_R8G8B8_UNORM,
    VK_FORMAT_R8G8_UNORM,
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    VK_FORMAT_R32G32B32A32_SFLOAT,
    VK_FORMAT_R32G32_SFLOAT,
    VK_FORMAT_R8G8B8A8_SRGB,
    VK_FORMAT_B8G8R8A8_SRGB,
    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    VK_FORMAT_R16_UNORM,
    VK_FORMAT_R16G16_UNORM,
    // Experimental (for Y416 and mutant P016/P010)
    VK_FORMAT_R16G16B16A16_UNORM,
    VK_FORMAT_R16G16_SFLOAT,
};
686
687const GrVkCaps::FormatInfo& GrVkCaps::getFormatInfo(VkFormat format) const {
688 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
689 "Size of VkFormats array must match static value in header");
690 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
691 if (kVkFormats[i] == format) {
692 return fFormatTable[i];
693 }
694 }
695 SK_ABORT("Invalid VkFormat");
Greg Daniel52ee5f62019-06-20 13:38:18 -0400696 static const FormatInfo kInvalidFormat;
697 return kInvalidFormat;
Greg Danielcaa795f2019-05-14 11:54:25 -0400698}
699
700void GrVkCaps::initFormatTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000701 const VkPhysicalDeviceProperties& properties) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400702 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
703 "Size of VkFormats array must match static value in header");
704 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
705 VkFormat format = kVkFormats[i];
706 if (!format_is_srgb(format) || fSRGBSupport) {
707 fFormatTable[i].init(interface, physDev, properties, format);
egdaniel8f1dcaa2016-04-01 10:10:45 -0700708 }
709 }
710}
711
Greg Danielcaa795f2019-05-14 11:54:25 -0400712void GrVkCaps::FormatInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700713 if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
714 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
715 *flags = *flags | kTextureable_Flag;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700716
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400717 // Ganesh assumes that all renderable surfaces are also texturable
Greg Danielcaa795f2019-05-14 11:54:25 -0400718 if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400719 *flags = *flags | kRenderable_Flag;
720 }
egdaniel8f1dcaa2016-04-01 10:10:45 -0700721 }
722
723 if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
724 *flags = *flags | kBlitSrc_Flag;
725 }
726
727 if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
728 *flags = *flags | kBlitDst_Flag;
729 }
730}
731
Greg Danielcaa795f2019-05-14 11:54:25 -0400732void GrVkCaps::FormatInfo::initSampleCounts(const GrVkInterface* interface,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400733 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000734 const VkPhysicalDeviceProperties& physProps,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400735 VkFormat format) {
736 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
737 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
738 VK_IMAGE_USAGE_SAMPLED_BIT |
739 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
Greg Daniel81e7bf82017-07-19 14:47:42 -0400740 VkImageFormatProperties properties;
741 GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
742 format,
743 VK_IMAGE_TYPE_2D,
744 VK_IMAGE_TILING_OPTIMAL,
745 usage,
Brian Osman2b23c4b2018-06-01 12:25:08 -0400746 0, // createFlags
Greg Daniel81e7bf82017-07-19 14:47:42 -0400747 &properties));
748 VkSampleCountFlags flags = properties.sampleCounts;
749 if (flags & VK_SAMPLE_COUNT_1_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400750 fColorSampleCounts.push_back(1);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400751 }
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000752 if (kImagination_VkVendor == physProps.vendorID) {
753 // MSAA does not work on imagination
754 return;
755 }
Greg Daniel81e7bf82017-07-19 14:47:42 -0400756 if (flags & VK_SAMPLE_COUNT_2_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400757 fColorSampleCounts.push_back(2);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400758 }
759 if (flags & VK_SAMPLE_COUNT_4_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400760 fColorSampleCounts.push_back(4);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400761 }
762 if (flags & VK_SAMPLE_COUNT_8_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400763 fColorSampleCounts.push_back(8);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400764 }
765 if (flags & VK_SAMPLE_COUNT_16_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400766 fColorSampleCounts.push_back(16);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400767 }
768 if (flags & VK_SAMPLE_COUNT_32_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400769 fColorSampleCounts.push_back(32);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400770 }
771 if (flags & VK_SAMPLE_COUNT_64_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400772 fColorSampleCounts.push_back(64);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400773 }
774}
775
Greg Danielcaa795f2019-05-14 11:54:25 -0400776void GrVkCaps::FormatInfo::init(const GrVkInterface* interface,
egdaniel8f1dcaa2016-04-01 10:10:45 -0700777 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000778 const VkPhysicalDeviceProperties& properties,
Greg Danielcaa795f2019-05-14 11:54:25 -0400779 VkFormat format) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700780 VkFormatProperties props;
781 memset(&props, 0, sizeof(VkFormatProperties));
782 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
Greg Danielcaa795f2019-05-14 11:54:25 -0400783 InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
784 InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400785 if (fOptimalFlags & kRenderable_Flag) {
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000786 this->initSampleCounts(interface, physDev, properties, format);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400787 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500788}
Greg Daniel81e7bf82017-07-19 14:47:42 -0400789
Robert Phillipsf209e882019-06-25 15:59:50 -0400790bool GrVkCaps::isFormatSRGB(const GrBackendFormat& format) const {
791 if (!format.getVkFormat()) {
792 return false;
793 }
794
795 return format_is_srgb(*format.getVkFormat());
796}
797
Greg Daniel2f2caea2019-07-08 14:24:47 -0400798bool GrVkCaps::isFormatTexturable(GrColorType, const GrBackendFormat& format) const {
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400799 if (!format.getVkFormat()) {
800 return false;
801 }
802
Greg Daniel2f2caea2019-07-08 14:24:47 -0400803 return this->isVkFormatTexturable(*format.getVkFormat());
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400804}
805
Greg Daniel2f2caea2019-07-08 14:24:47 -0400806bool GrVkCaps::isVkFormatTexturable(VkFormat format) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400807 if (!GrVkFormatIsSupported(format)) {
808 return false;
809 }
810
811 const FormatInfo& info = this->getFormatInfo(format);
812 return SkToBool(FormatInfo::kTextureable_Flag & info.fOptimalFlags);
813}
814
815bool GrVkCaps::isConfigTexturable(GrPixelConfig config) const {
816 VkFormat format;
817 if (!GrPixelConfigToVkFormat(config, &format)) {
818 return false;
819 }
Greg Daniel2f2caea2019-07-08 14:24:47 -0400820 return this->isVkFormatTexturable(format);
Robert Phillips39ef2ef2019-05-15 08:45:53 -0400821}
822
823bool GrVkCaps::isFormatRenderable(VkFormat format) const {
824 return this->maxRenderTargetSampleCount(format) > 0;
Greg Danielcaa795f2019-05-14 11:54:25 -0400825}
826
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400827int GrVkCaps::getRenderTargetSampleCount(int requestedCount,
Greg Daniel5c96db82019-07-09 14:06:58 -0400828 GrColorType, const GrBackendFormat& format) const {
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400829 if (!format.getVkFormat()) {
830 return 0;
831 }
832
833 return this->getRenderTargetSampleCount(requestedCount, *format.getVkFormat());
834}
835
Brian Salomonbdecacf2018-02-02 20:32:49 -0500836int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400837 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
838 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
839 if (config == kRGB_888X_GrPixelConfig) {
840 return 0;
841 }
842
843 VkFormat format;
844 if (!GrPixelConfigToVkFormat(config, &format)) {
845 return 0;
846 }
847
848 return this->getRenderTargetSampleCount(requestedCount, format);
849}
850
851int GrVkCaps::getRenderTargetSampleCount(int requestedCount, VkFormat format) const {
Brian Salomonbdecacf2018-02-02 20:32:49 -0500852 requestedCount = SkTMax(1, requestedCount);
Greg Danielcaa795f2019-05-14 11:54:25 -0400853
854 const FormatInfo& info = this->getFormatInfo(format);
855
856 int count = info.fColorSampleCounts.count();
Brian Salomonbdecacf2018-02-02 20:32:49 -0500857
858 if (!count) {
Greg Daniel81e7bf82017-07-19 14:47:42 -0400859 return 0;
860 }
861
Brian Salomonbdecacf2018-02-02 20:32:49 -0500862 if (1 == requestedCount) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400863 SkASSERT(info.fColorSampleCounts.count() && info.fColorSampleCounts[0] == 1);
Brian Salomonbdecacf2018-02-02 20:32:49 -0500864 return 1;
865 }
866
Greg Daniel81e7bf82017-07-19 14:47:42 -0400867 for (int i = 0; i < count; ++i) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400868 if (info.fColorSampleCounts[i] >= requestedCount) {
869 return info.fColorSampleCounts[i];
Greg Daniel81e7bf82017-07-19 14:47:42 -0400870 }
871 }
Brian Salomonbdecacf2018-02-02 20:32:49 -0500872 return 0;
873}
874
Greg Daniel5c96db82019-07-09 14:06:58 -0400875int GrVkCaps::maxRenderTargetSampleCount(GrColorType, const GrBackendFormat& format) const {
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400876 if (!format.getVkFormat()) {
877 return 0;
878 }
879
880 return this->maxRenderTargetSampleCount(*format.getVkFormat());
881}
882
Brian Salomonbdecacf2018-02-02 20:32:49 -0500883int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400884 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
885 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
886 if (config == kRGB_888X_GrPixelConfig) {
887 return 0;
888 }
889
890 VkFormat format;
891 if (!GrPixelConfigToVkFormat(config, &format)) {
892 return 0;
893 }
894 return this->maxRenderTargetSampleCount(format);
895}
896
897int GrVkCaps::maxRenderTargetSampleCount(VkFormat format) const {
898 const FormatInfo& info = this->getFormatInfo(format);
899
900 const auto& table = info.fColorSampleCounts;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500901 if (!table.count()) {
902 return 0;
903 }
904 return table[table.count() - 1];
Brian Salomond653cac2018-02-01 13:58:00 -0500905}
906
Brian Salomondc0710f2019-07-01 14:59:32 -0400907GrCaps::SurfaceReadPixelsSupport GrVkCaps::surfaceSupportsReadPixels(
908 const GrSurface* surface) const {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400909 if (surface->isProtected()) {
Brian Salomondc0710f2019-07-01 14:59:32 -0400910 return SurfaceReadPixelsSupport::kUnsupported;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400911 }
Greg Daniela51e93c2019-03-25 12:30:45 -0400912 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
913 // We can't directly read from a VkImage that has a ycbcr sampler.
914 if (tex->ycbcrConversionInfo().isValid()) {
Brian Salomondc0710f2019-07-01 14:59:32 -0400915 return SurfaceReadPixelsSupport::kCopyToTexture2D;
Greg Daniela51e93c2019-03-25 12:30:45 -0400916 }
917 }
Brian Salomondc0710f2019-07-01 14:59:32 -0400918 return SurfaceReadPixelsSupport::kSupported;
Greg Daniela51e93c2019-03-25 12:30:45 -0400919}
920
Brian Salomonc67c31c2018-12-06 10:00:03 -0500921bool GrVkCaps::onSurfaceSupportsWritePixels(const GrSurface* surface) const {
Brian Salomon3d86a192018-02-27 16:46:11 -0500922 if (auto rt = surface->asRenderTarget()) {
Chris Dalton6ce447a2019-06-23 18:07:38 -0600923 return rt->numSamples() <= 1 && SkToBool(surface->asTexture());
Brian Salomon3d86a192018-02-27 16:46:11 -0500924 }
Greg Daniela51e93c2019-03-25 12:30:45 -0400925 // We can't write to a texture that has a ycbcr sampler.
926 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
927 // We can't directly read from a VkImage that has a ycbcr sampler.
928 if (tex->ycbcrConversionInfo().isValid()) {
929 return false;
930 }
931 }
Brian Salomon3d86a192018-02-27 16:46:11 -0500932 return true;
933}
934
// Maps a (VkFormat, GrColorType) pair to the GrPixelConfig Ganesh uses internally, or
// kUnknown_GrPixelConfig when the pair is not a supported combination.
// A near clone of format_color_type_valid_pair (the SK_DEBUG-only validator in this file);
// keep the two in sync when adding formats.
// NOTE(review): unlike format_color_type_valid_pair, kBGRA_8888 here does not accept
// VK_FORMAT_B8G8R8A8_SRGB — confirm the discrepancy is intentional.
static GrPixelConfig validate_image_info(VkFormat format, GrColorType ct, bool hasYcbcrConversion) {
    if (format == VK_FORMAT_UNDEFINED) {
        // If the format is undefined then it is only valid as an external image which requires that
        // we have a valid VkYcbcrConversion.
        if (hasYcbcrConversion) {
            // We don't actually care what the color type or config are since we won't use those
            // values for external textures. However, for read pixels we will draw to a non ycbcr
            // texture of this config so we set RGBA here for that.
            return kRGBA_8888_GrPixelConfig;
        } else {
            return kUnknown_GrPixelConfig;
        }
    }

    if (hasYcbcrConversion) {
        // We only support having a ycbcr conversion for external images.
        return kUnknown_GrPixelConfig;
    }

    // Each color type maps to at most one or two VkFormats; any other pairing is invalid.
    switch (ct) {
        case GrColorType::kUnknown:
            break;
        case GrColorType::kAlpha_8:
            if (VK_FORMAT_R8_UNORM == format) {
                return kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case GrColorType::kBGR_565:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                return kRGB_565_GrPixelConfig;
            }
            break;
        case GrColorType::kABGR_4444:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format ||
                VK_FORMAT_R4G4B4A4_UNORM_PACK16 == format) {
                return kRGBA_4444_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_8888:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGBA_8888_GrPixelConfig;
            } else if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                return kSRGBA_8888_GrPixelConfig;
            }
            break;
        case GrColorType::kRGB_888x:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                return kRGB_888_GrPixelConfig;
            }
            // RGBA format with the alpha channel ignored.
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGB_888X_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_88:
            if (VK_FORMAT_R8G8_UNORM == format) {
                return kRG_88_GrPixelConfig;
            }
            break;
        case GrColorType::kBGRA_8888:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                return kBGRA_8888_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_1010102:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                return kRGBA_1010102_GrPixelConfig;
            }
            break;
        case GrColorType::kGray_8:
            if (VK_FORMAT_R8_UNORM == format) {
                return kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case GrColorType::kAlpha_F16:
            if (VK_FORMAT_R16_SFLOAT == format) {
                return kAlpha_half_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F16:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F16_Clamped:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_Clamped_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_F32:
            if (VK_FORMAT_R32G32_SFLOAT == format) {
                return kRG_float_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_F32:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                return kRGBA_float_GrPixelConfig;
            }
            break;
        case GrColorType::kR_16:
            if (VK_FORMAT_R16_UNORM == format) {
                return kR_16_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_1616:
            if (VK_FORMAT_R16G16_UNORM == format) {
                return kRG_1616_GrPixelConfig;
            }
            break;
        case GrColorType::kRGBA_16161616:
            if (VK_FORMAT_R16G16B16A16_UNORM == format) {
                return kRGBA_16161616_GrPixelConfig;
            }
            break;
        case GrColorType::kRG_F16:
            if (VK_FORMAT_R16G16_SFLOAT == format) {
                return kRG_half_GrPixelConfig;
            }
            break;
    }

    return kUnknown_GrPixelConfig;
}
1058
Brian Salomonf391d0f2018-12-14 09:18:50 -05001059GrPixelConfig GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt,
Robert Phillips1e2cb442019-07-02 15:51:28 -04001060 GrColorType ct) const {
Greg Daniel323fbcf2018-04-10 13:46:30 -04001061 GrVkImageInfo imageInfo;
1062 if (!rt.getVkImageInfo(&imageInfo)) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001063 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001064 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001065 return validate_image_info(imageInfo.fFormat, ct, imageInfo.fYcbcrConversionInfo.isValid());
Robert Phillipsfc711a22018-02-13 17:03:00 -05001066}
1067
Greg Daniel627d0532019-07-08 16:48:14 -04001068bool GrVkCaps::onAreColorTypeAndFormatCompatible(GrColorType ct,
1069 const GrBackendFormat& format) const {
Robert Phillipsc046ff02019-07-01 10:34:03 -04001070 const VkFormat* vkFormat = format.getVkFormat();
1071 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
1072 if (!vkFormat || !ycbcrInfo) {
1073 return false;
1074 }
1075
1076 return kUnknown_GrPixelConfig != validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
1077}
1078
1079
Greg Daniel627d0532019-07-08 16:48:14 -04001080GrPixelConfig GrVkCaps::onGetConfigFromBackendFormat(const GrBackendFormat& format,
1081 GrColorType ct) const {
Robert Phillipsfc711a22018-02-13 17:03:00 -05001082 const VkFormat* vkFormat = format.getVkFormat();
Greg Daniel14c55c22018-12-04 11:25:03 -05001083 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
1084 if (!vkFormat || !ycbcrInfo) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001085 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001086 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001087 return validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
Greg Danielfaa095e2017-12-19 13:15:02 -05001088}
Greg Danielf5d87582017-12-18 14:48:15 -05001089
Brian Salomonf391d0f2018-12-14 09:18:50 -05001090static GrPixelConfig get_yuva_config(VkFormat vkFormat) {
Jim Van Verth9bf81202018-10-30 15:53:36 -04001091 switch (vkFormat) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001092 case VK_FORMAT_R8_UNORM:
1093 return kAlpha_8_as_Red_GrPixelConfig;
1094 case VK_FORMAT_R8G8B8A8_UNORM:
1095 return kRGBA_8888_GrPixelConfig;
1096 case VK_FORMAT_R8G8B8_UNORM:
1097 return kRGB_888_GrPixelConfig;
1098 case VK_FORMAT_R8G8_UNORM:
1099 return kRG_88_GrPixelConfig;
1100 case VK_FORMAT_B8G8R8A8_UNORM:
1101 return kBGRA_8888_GrPixelConfig;
Robert Phillips2dd1b472019-03-21 09:00:20 -04001102 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
1103 return kRGBA_1010102_GrPixelConfig;
Robert Phillipsfe18de52019-06-06 17:21:50 -04001104 case VK_FORMAT_R16_UNORM:
1105 return kR_16_GrPixelConfig;
1106 case VK_FORMAT_R16G16_UNORM:
1107 return kRG_1616_GrPixelConfig;
Robert Phillips66a46032019-06-18 08:00:42 -04001108 // Experimental (for Y416 and mutant P016/P010)
1109 case VK_FORMAT_R16G16B16A16_UNORM:
1110 return kRGBA_16161616_GrPixelConfig;
1111 case VK_FORMAT_R16G16_SFLOAT:
1112 return kRG_half_GrPixelConfig;
Brian Salomonf391d0f2018-12-14 09:18:50 -05001113 default:
1114 return kUnknown_GrPixelConfig;
Jim Van Verthb7f0b9c2018-10-22 14:12:03 -04001115 }
Jim Van Verthb7f0b9c2018-10-22 14:12:03 -04001116}
1117
Brian Salomonf391d0f2018-12-14 09:18:50 -05001118GrPixelConfig GrVkCaps::getYUVAConfigFromBackendFormat(const GrBackendFormat& format) const {
Jim Van Verth9bf81202018-10-30 15:53:36 -04001119 const VkFormat* vkFormat = format.getVkFormat();
1120 if (!vkFormat) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001121 return kUnknown_GrPixelConfig;
Jim Van Verth9bf81202018-10-30 15:53:36 -04001122 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001123 return get_yuva_config(*vkFormat);
Timothy Liang036fdfe2018-06-28 15:50:36 -04001124}
Greg Daniel4065d452018-11-16 15:43:41 -05001125
Greg Daniel627d0532019-07-08 16:48:14 -04001126GrBackendFormat GrVkCaps::getBackendFormatFromColorType(GrColorType ct,
1127 GrSRGBEncoded srgbEncoded) const {
Greg Daniel4065d452018-11-16 15:43:41 -05001128 GrPixelConfig config = GrColorTypeToPixelConfig(ct, srgbEncoded);
1129 if (config == kUnknown_GrPixelConfig) {
1130 return GrBackendFormat();
1131 }
1132 VkFormat format;
1133 if (!GrPixelConfigToVkFormat(config, &format)) {
1134 return GrBackendFormat();
1135 }
1136 return GrBackendFormat::MakeVk(format);
1137}
Timothy Liang036fdfe2018-06-28 15:50:36 -04001138
// Maps a compressed image type to its Vulkan backend format. Aborts (in debug) on an
// unrecognized type.
GrBackendFormat GrVkCaps::getBackendFormatFromCompressionType(
        SkImage::CompressionType compressionType) const {
    // Deliberately no default case: the compiler will flag newly-added compression types.
    switch (compressionType) {
        case SkImage::kETC1_CompressionType:
            return GrBackendFormat::MakeVk(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK);
    }
    SK_ABORT("Invalid compression type");
    return {};
}
1148
#ifdef SK_DEBUG
// Debug-only validator: returns true when |vkFormat| is a legal backing format for
// |colorType|. A near clone of validate_image_info above; keep the two in sync when
// adding formats.
static bool format_color_type_valid_pair(VkFormat vkFormat, GrColorType colorType) {
    switch (colorType) {
        case GrColorType::kUnknown:
            return false;
        case GrColorType::kAlpha_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kBGR_565:
            return VK_FORMAT_R5G6B5_UNORM_PACK16 == vkFormat;
        case GrColorType::kABGR_4444:
            return VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat ||
                   VK_FORMAT_R4G4B4A4_UNORM_PACK16 == vkFormat;
        case GrColorType::kRGBA_8888:
            return VK_FORMAT_R8G8B8A8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_SRGB == vkFormat;
        case GrColorType::kRGB_888x:
            // ETC1 decompresses to RGB, so its closest color type must be kRGB_888x.
            GR_STATIC_ASSERT(GrCompressionTypeClosestColorType(SkImage::kETC1_CompressionType) ==
                             GrColorType::kRGB_888x);
            return VK_FORMAT_R8G8B8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_UNORM == vkFormat ||
                   VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK == vkFormat;
        case GrColorType::kRG_88:
            return VK_FORMAT_R8G8_UNORM == vkFormat;
        case GrColorType::kBGRA_8888:
            return VK_FORMAT_B8G8R8A8_UNORM == vkFormat || VK_FORMAT_B8G8R8A8_SRGB == vkFormat;
        case GrColorType::kRGBA_1010102:
            return VK_FORMAT_A2B10G10R10_UNORM_PACK32 == vkFormat;
        case GrColorType::kGray_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kAlpha_F16:
            return VK_FORMAT_R16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16_Clamped:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRG_F32:
            return VK_FORMAT_R32G32_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F32:
            return VK_FORMAT_R32G32B32A32_SFLOAT == vkFormat;
        case GrColorType::kR_16:
            return VK_FORMAT_R16_UNORM == vkFormat;
        case GrColorType::kRG_1616:
            return VK_FORMAT_R16G16_UNORM == vkFormat;
        // Experimental (for Y416 and mutant P016/P010)
        case GrColorType::kRGBA_16161616:
            return VK_FORMAT_R16G16B16A16_UNORM == vkFormat;
        case GrColorType::kRG_F16:
            return VK_FORMAT_R16G16_SFLOAT == vkFormat;
    }
    SK_ABORT("Unknown color type");
    return false;
}
#endif
1200
1201static GrSwizzle get_swizzle(const GrBackendFormat& format, GrColorType colorType,
1202 bool forOutput) {
1203 SkASSERT(format.getVkFormat());
1204 VkFormat vkFormat = *format.getVkFormat();
1205
1206 SkASSERT(format_color_type_valid_pair(vkFormat, colorType));
1207
1208 switch (colorType) {
1209 case GrColorType::kAlpha_8: // fall through
1210 case GrColorType::kAlpha_F16:
1211 if (forOutput) {
1212 return GrSwizzle::AAAA();
1213 } else {
1214 return GrSwizzle::RRRR();
1215 }
1216 case GrColorType::kGray_8:
1217 if (!forOutput) {
1218 return GrSwizzle::RRRA();
1219 }
1220 break;
1221 case GrColorType::kABGR_4444:
1222 if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat) {
1223 return GrSwizzle::BGRA();
1224 }
1225 break;
1226 case GrColorType::kRGB_888x:
1227 if (!forOutput) {
1228 return GrSwizzle::RGB1();
1229 }
1230 default:
1231 return GrSwizzle::RGBA();
1232 }
1233 return GrSwizzle::RGBA();
1234}
1235
1236GrSwizzle GrVkCaps::getTextureSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
1237 return get_swizzle(format, colorType, false);
1238}
1239GrSwizzle GrVkCaps::getOutputSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
1240 return get_swizzle(format, colorType, true);
1241}
1242
Brian Salomon26de56e2019-04-10 12:14:26 -04001243size_t GrVkCaps::onTransferFromOffsetAlignment(GrColorType bufferColorType) const {
Brian Salomona585fe92019-04-09 14:57:00 -04001244 // This GrColorType has 32 bpp but the Vulkan pixel format we use for with may have 24bpp
1245 // (VK_FORMAT_R8G8B8_...) or may be 32 bpp. We don't support post transforming the pixel data
1246 // for transfer-from currently and don't want to have to pass info about the src surface here.
1247 if (bufferColorType == GrColorType::kRGB_888x) {
1248 return false;
1249 }
1250 size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
1251 // The VkBufferImageCopy bufferOffset field must be both a multiple of 4 and of a single texel.
1252 switch (bpp & 0b11) {
Brian Salomon26de56e2019-04-10 12:14:26 -04001253 // bpp is already a multiple of 4.
1254 case 0: return bpp;
1255 // bpp is a multiple of 2 but not 4.
1256 case 2: return 2 * bpp;
1257 // bpp is not a multiple of 2.
1258 default: return 4 * bpp;
Brian Salomona585fe92019-04-09 14:57:00 -04001259 }
Brian Salomona585fe92019-04-09 14:57:00 -04001260}