blob: aa4bac3f9c1fff4dbfe80728c3ea097b205bd8ec [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/gpu/GrBackendSurface.h"
9#include "include/gpu/GrRenderTarget.h"
10#include "include/gpu/vk/GrVkBackendContext.h"
11#include "include/gpu/vk/GrVkExtensions.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrShaderCaps.h"
14#include "src/gpu/SkGr.h"
15#include "src/gpu/vk/GrVkCaps.h"
16#include "src/gpu/vk/GrVkInterface.h"
17#include "src/gpu/vk/GrVkTexture.h"
18#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019
Emircan Uysaler23ca4e72019-06-24 10:53:09 -040020#ifdef SK_BUILD_FOR_ANDROID
21#include <sys/system_properties.h>
22#endif
23
// Constructor: seeds the base GrCaps fields with values that are either always available in
// Vulkan or are the spec-mandated minimums, then calls init() for everything that depends on
// the actual device, features, and enabled extensions.
// NOTE(review): instanceVersion is accepted but neither used here nor forwarded to init() —
// confirm whether it is still needed in the signature.
GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, uint32_t physicalDeviceVersion,
                   const GrVkExtensions& extensions, GrProtected isProtected)
        : INHERITED(contextOptions) {
    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;   // always available in Vulkan
    fSRGBSupport = true;   // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fDiscardRenderTargetSupport = true;
    fReuseScratchTextures = true; //TODO: figure this out
    fGpuTracingSupport = false; //TODO: figure this out
    fOversizedStencilSupport = false; //TODO: figure this out
    fInstanceAttribSupport = true;

    fSemaphoreSupport = true;   // always available in Vulkan
    fFenceSyncSupport = true;   // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;

    fTransferBufferSupport = true;

    // Spec minimums; init() raises these from the device's maxImageDimension2D.
    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096; // minimum required by spec

    fDynamicStateArrayGeometryProcessorTextureSupport = true;

    fShaderCaps.reset(new GrShaderCaps(contextOptions));

    // Device/extension-dependent capabilities are filled in here.
    this->init(contextOptions, vkInterface, physDev, features, physicalDeviceVersion, extensions,
               isProtected);
}
58
Robert Phillipsbf25d432017-04-07 10:08:53 -040059bool GrVkCaps::initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
Greg Daniel46cfbc62019-06-07 11:43:30 -040060 bool* rectsMustMatch, bool* disallowSubrect) const {
Eric Karl74480882017-04-03 14:49:05 -070061 // Vk doesn't use rectsMustMatch or disallowSubrect. Always return false.
62 *rectsMustMatch = false;
63 *disallowSubrect = false;
64
Brian Salomon467921e2017-03-06 16:17:12 -050065 // We can always succeed here with either a CopyImage (none msaa src) or ResolveImage (msaa).
66 // For CopyImage we can make a simple texture, for ResolveImage we require the dst to be a
67 // render target as well.
Brian Salomon467921e2017-03-06 16:17:12 -050068 desc->fConfig = src->config();
Chris Dalton6ce447a2019-06-23 18:07:38 -060069 if (src->numSamples() > 1 || src->asTextureProxy()) {
Brian Salomon467921e2017-03-06 16:17:12 -050070 desc->fFlags = kRenderTarget_GrSurfaceFlag;
71 } else {
72 // Just going to use CopyImage here
73 desc->fFlags = kNone_GrSurfaceFlags;
74 }
75
76 return true;
77}
78
// Maps a GrPixelConfig onto an integer identifying its Vulkan format "compatibility class".
// vkCmdCopyImage only permits copies between images whose formats are in the same class
// (classes here roughly track bytes-per-pixel, with compressed formats in their own class).
// Configs in the same class return the same value; the specific integers are arbitrary labels.
static int get_compatible_format_class(GrPixelConfig config) {
    switch (config) {
        // 8-bit single-channel formats.
        case kAlpha_8_GrPixelConfig:
        case kAlpha_8_as_Red_GrPixelConfig:
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            return 1;
        // 16-bit formats.
        case kRGB_565_GrPixelConfig:
        case kRGBA_4444_GrPixelConfig:
        case kRG_88_GrPixelConfig:
        case kAlpha_half_GrPixelConfig:
        case kAlpha_half_as_Red_GrPixelConfig:
        case kR_16_GrPixelConfig:
            return 2;
        // 24-bit formats.
        case kRGB_888_GrPixelConfig:
            return 3;
        // 32-bit formats.
        case kRGBA_8888_GrPixelConfig:
        case kRGB_888X_GrPixelConfig:
        case kBGRA_8888_GrPixelConfig:
        case kSRGBA_8888_GrPixelConfig:
        case kRGBA_1010102_GrPixelConfig:
        case kRG_1616_GrPixelConfig:
            return 4;
        // 64-bit formats.
        case kRGBA_half_GrPixelConfig:
        case kRGBA_half_Clamped_GrPixelConfig:
        case kRG_float_GrPixelConfig:
            return 5;
        // 128-bit formats.
        case kRGBA_float_GrPixelConfig:
            return 6;
        // Compressed formats get their own class.
        case kRGB_ETC1_GrPixelConfig:
            return 7;
        // These configs have no Vulkan backing format; reaching here is a programming error.
        case kUnknown_GrPixelConfig:
        case kAlpha_8_as_Alpha_GrPixelConfig:
        case kGray_8_as_Lum_GrPixelConfig:
            SK_ABORT("Unsupported Vulkan pixel config");
            return 0;

        // Experimental (for Y416 and mutant P016/P010)
        case kRGBA_16161616_GrPixelConfig:
            return 8;
        case kRG_half_GrPixelConfig:
            return 4;
    }
    SK_ABORT("Invalid pixel config");
    return 0;
}
125
Greg Daniel46cfbc62019-06-07 11:43:30 -0400126bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
127 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400128 if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
129 return false;
130 }
131
Greg Daniela51e93c2019-03-25 12:30:45 -0400132 if (dstHasYcbcr || srcHasYcbcr) {
133 return false;
134 }
135
Greg Daniel25af6712018-04-25 10:44:38 -0400136 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
137 // as image usage flags.
Greg Daniel46cfbc62019-06-07 11:43:30 -0400138 if (get_compatible_format_class(srcConfig) != get_compatible_format_class(dstConfig)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400139 return false;
140 }
141
Greg Daniel25af6712018-04-25 10:44:38 -0400142 return true;
143}
144
145bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
Greg Daniela51e93c2019-03-25 12:30:45 -0400146 bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
147 bool srcIsLinear, bool srcHasYcbcr) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400148
149 VkFormat dstFormat;
150 SkAssertResult(GrPixelConfigToVkFormat(dstConfig, &dstFormat));
151 VkFormat srcFormat;
152 SkAssertResult(GrPixelConfigToVkFormat(srcConfig, &srcFormat));
Greg Daniel25af6712018-04-25 10:44:38 -0400153 // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
154 // as image usage flags.
Greg Danielcaa795f2019-05-14 11:54:25 -0400155 if (!this->formatCanBeDstofBlit(dstFormat, dstIsLinear) ||
156 !this->formatCanBeSrcofBlit(srcFormat, srcIsLinear)) {
Greg Daniel25af6712018-04-25 10:44:38 -0400157 return false;
158 }
159
Greg Daniel25af6712018-04-25 10:44:38 -0400160 // We cannot blit images that are multisampled. Will need to figure out if we can blit the
161 // resolved msaa though.
162 if (dstSampleCnt > 1 || srcSampleCnt > 1) {
163 return false;
164 }
165
Greg Daniela51e93c2019-03-25 12:30:45 -0400166 if (dstHasYcbcr || srcHasYcbcr) {
167 return false;
168 }
169
Greg Daniel25af6712018-04-25 10:44:38 -0400170 return true;
171}
172
Greg Daniel46cfbc62019-06-07 11:43:30 -0400173bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt, bool dstHasYcbcr,
174 GrPixelConfig srcConfig, int srcSampleCnt, bool srcHasYcbcr) const {
Greg Daniel25af6712018-04-25 10:44:38 -0400175 // The src surface must be multisampled.
176 if (srcSampleCnt <= 1) {
177 return false;
178 }
179
180 // The dst must not be multisampled.
181 if (dstSampleCnt > 1) {
182 return false;
183 }
184
185 // Surfaces must have the same format.
186 if (dstConfig != srcConfig) {
187 return false;
188 }
189
Greg Daniela51e93c2019-03-25 12:30:45 -0400190 if (dstHasYcbcr || srcHasYcbcr) {
191 return false;
192 }
193
Greg Daniel25af6712018-04-25 10:44:38 -0400194 return true;
195}
196
// GrCaps hook: decides whether 'src' can be copied into 'dst' by any of the three Vulkan copy
// mechanisms (CopyImage, BlitImage, or ResolveImage). Gathers sample counts, linear-tiling and
// ycbcr information for both proxies and defers to the three can* helpers above.
bool GrVkCaps::onCanCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
                                const SkIRect& srcRect, const SkIPoint& dstPoint) const {
    // Never copy protected content into an unprotected destination.
    if (src->isProtected() && !dst->isProtected()) {
        return false;
    }

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    // TODO: Figure out a way to track if we've wrapped a linear texture in a proxy (e.g.
    // PromiseImage which won't get instantiated right away. Does this need a similar thing like the
    // tracking of external or rectangle textures in GL? For now we don't create linear textures
    // internally, and I don't believe anyone is wrapping them.
    bool srcIsLinear = false;
    bool dstIsLinear = false;

    // A sample count of 0 means "not a render target"; the helpers rely on that encoding.
    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        dstSampleCnt = rtProxy->numSamples();
    }
    if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        srcSampleCnt = rtProxy->numSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));

    // A valid ycbcr conversion on either side restricts which copy paths are usable.
    bool dstHasYcbcr = false;
    if (auto ycbcr = dst->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            dstHasYcbcr = true;
        }
    }

    bool srcHasYcbcr = false;
    if (auto ycbcr = src->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            srcHasYcbcr = true;
        }
    }

    // The copy succeeds if any one of the three mechanisms can handle it.
    return this->canCopyImage(dstConfig, dstSampleCnt, dstHasYcbcr,
                              srcConfig, srcSampleCnt, srcHasYcbcr) ||
           this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear, dstHasYcbcr,
                               srcConfig, srcSampleCnt, srcIsLinear, srcHasYcbcr) ||
           this->canCopyAsResolve(dstConfig, dstSampleCnt, dstHasYcbcr,
                                  srcConfig, srcSampleCnt, srcHasYcbcr);
}
257
Greg Daniel7e000222018-12-03 10:08:21 -0500258template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
259 VkStructureType type) {
260 // All Vulkan structs that could be part of the features chain will start with the
261 // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
262 // so we can get access to the pNext for the next struct.
263 struct CommonVulkanHeader {
264 VkStructureType sType;
265 void* pNext;
266 };
267
268 void* pNext = features.pNext;
269 while (pNext) {
270 CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
271 if (header->sType == type) {
272 return static_cast<T*>(pNext);
273 }
274 pNext = header->pNext;
275 }
276 return nullptr;
277}
278
// Performs all device-dependent capability initialization: queries physical-device properties
// and memory properties, derives fSupports* flags from the device API version and the enabled
// extension list, applies vendor-specific tuning, and then fills in the format/stencil tables,
// shader caps, and driver-correctness workarounds.
// Each "version >= 1.1 || extension" check reflects an extension promoted to core in Vulkan 1.1.
void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    uint32_t physicalDeviceVersion, const GrVkExtensions& extensions,
                    GrProtected isProtected) {
    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));

    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));

    // The version we were asked to use can never exceed what the device reports.
    SkASSERT(physicalDeviceVersion <= properties.apiVersion);

    if (extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 1)) {
        fSupportsSwapchain = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }

    // Dedicated allocation additionally requires memory-requirements-2 when pre-1.1.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }

    // External memory needs both the capabilities and memory extensions plus their prerequisites.
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices are not supporting the QUEUE_FAMILY_FOREIGN_EXTENSION, so until they
    // do we don't explicitly require it here even the spec says it is required.
    if (extensions.hasExtension(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
        /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    // Ycbcr conversion requires the feature bit, AHardwareBuffer support, and (pre-1.1) the
    // sampler-ycbcr-conversion extension together with its four prerequisite extensions.
    auto ycbcrFeatures =
            get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
    if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion &&
        fSupportsAndroidHWBExternalMemory &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
         (extensions.hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1) &&
          this->supportsMaintenance1() &&
          this->supportsBindMemory2() &&
          this->supportsMemoryRequirements2() &&
          this->supportsPhysicalDeviceProperties2()))) {
        fSupportsYcbcrConversion = true;
    }
    // We always push back the default GrVkYcbcrConversionInfo so that the case of no conversion
    // will return a key of 0.
    fYcbcrInfos.push_back(GrVkYcbcrConversionInfo());

    // Protected memory is only claimed when requested and the device is at least Vulkan 1.1.
    if ((isProtected == GrProtected::kYes) &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0))) {
        fSupportsProtectedMemory = true;
        fAvoidUpdateBuffers = true;
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // A "clear" load for the CCPR atlas runs faster on QC than a "discard" load followed by a
        // scissored clear.
        // On NVIDIA and Intel, the discard load followed by clear is faster.
        // TODO: Evaluate on ARM, Imagination, and ATI.
        fPreferFullscreenClears = true;
    }

    if (kQualcomm_VkVendor == properties.vendorID || kARM_VkVendor == properties.vendorID) {
        // On Qualcomm and ARM mapping a gpu buffer and doing both reads and writes to it is slow.
        // Thus for index and vertex buffers we will force to use a cpu side buffer and then copy
        // the whole buffer up to the gpu.
        fBufferMapThreshold = SK_MaxS32;
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // On Qualcomm it looks like using vkCmdUpdateBuffer is slower than using a transfer buffer
        // even for small sizes.
        fAvoidUpdateBuffers = true;
    }

    if (kARM_VkVendor == properties.vendorID) {
        // ARM seems to do better with more fine triangles as opposed to using the sample mask.
        // (At least in our current round rect op.)
        fPreferTrianglesOverSampleMask = true;
    }

    this->initFormatTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }

    // Context options may override anything computed above; apply them last.
    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}
426
// Applies vendor/platform-specific workarounds for known driver bugs. Only called when
// contextOptions.fDisableDriverCorrectnessWorkarounds is false (see init()). All decisions key
// off properties.vendorID and the build platform.
void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
        // Transfer doesn't support this workaround.
        fTransferBufferSupport = false;
    }

#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif

#if defined(SK_BUILD_FOR_ANDROID)
    // Protected memory features have problems in Android P and earlier.
    // SDK level 28 corresponds to Android P; the property read failing (strLength == 0) is
    // treated conservatively as "old OS".
    if (fSupportsProtectedMemory && (kQualcomm_VkVendor == properties.vendorID)) {
        char androidAPIVersion[PROP_VALUE_MAX];
        int strLength = __system_property_get("ro.build.version.sdk", androidAPIVersion);
        if (strLength == 0 || atoi(androidAPIVersion) <= 28) {
            fSupportsProtectedMemory = false;
        }
    }
#endif

    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // Current workaround is to use a different secondary command buffer for each new VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }

    // On Mali galaxy s7 we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
        fAvoidWritePixelsFastPath = true; // bugs.skia.org/8064
    }

    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only supports 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}
488
489int get_max_sample_count(VkSampleCountFlags flags) {
490 SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
491 if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
492 return 0;
493 }
494 if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
495 return 2;
496 }
497 if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
498 return 4;
499 }
500 if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
501 return 8;
502 }
503 if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
504 return 16;
505 }
506 if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
507 return 32;
508 }
509 return 64;
510}
511
// Fills in the generic GrCaps fields from the queried device properties: vertex-attribute and
// texture/render-target size limits, buffer-mapping policy, and — when the advanced-blend
// extension is present — the blend-equation support level.
void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // So GPUs, like AMD, are reporting MAX_INT support vertex attributes. In general, there is no
    // need for us ever to support that amount, and it makes tests which tests all the vertex
    // attribs timeout looping over that many. For now, we'll cap this at 64 max and can raise it if
    // we ever find that need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);

    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);

    // TODO: check if RT's larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;

    // Assuming since we will always map in the end to upload the data we might as well just map
    // from the get go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;

    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag | kAsyncRead_MapFlag;

    fOversizedStencilSupport = true;

    // Query advanced-blend properties through the VkPhysicalDeviceProperties2 pNext chain; this
    // requires both the extension and physical-device-properties-2 support.
    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {

        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;

        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;

        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));

        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;

            auto blendFeatures =
                get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                        features,
                        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non coherent blends are not supported in our vulkan backend. They
                // require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}
578
// Fills in the GrShaderCaps from device properties and enabled features. Most entries are
// constants that always hold for Vulkan/SPIR-V; only dual-source blending, flat-interpolation
// preference, and the sampler limits depend on the actual device.
void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";

    // Vulkan is based off ES 3.0 so the following should all be supported
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;

    // GrShaderCaps

    shaderCaps->fShaderDerivativeSupport = true;

    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    //         shaderCaps->fGSInvocationsSupport = features.features.geometryShader;

    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;

    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;

    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;

    // Fragment samplers are bounded by both per-stage sampled images and per-stage samplers.
    shaderCaps->fMaxFragmentSamplers = SkTMin(
            SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
                   properties.limits.maxPerStageDescriptorSamplers),
            (uint32_t)INT_MAX);
}
614
egdaniel8f1dcaa2016-04-01 10:10:45 -0700615bool stencil_format_supported(const GrVkInterface* interface,
616 VkPhysicalDevice physDev,
617 VkFormat format) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500618 VkFormatProperties props;
619 memset(&props, 0, sizeof(VkFormatProperties));
620 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
egdaniel8f1dcaa2016-04-01 10:10:45 -0700621 return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
Greg Daniel164a9f02016-02-22 09:56:40 -0500622}
623
egdaniel8f1dcaa2016-04-01 10:10:45 -0700624void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
625 // List of legal stencil formats (though perhaps not supported on
626 // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
jvanvertha4b0fed2016-04-27 11:42:21 -0700627 // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
egdaniel8f1dcaa2016-04-01 10:10:45 -0700628 // can optionally have 24 unused bits at the end so we assume the total bits is 64.
Greg Daniel164a9f02016-02-22 09:56:40 -0500629 static const StencilFormat
630 // internal Format stencil bits total bits packed?
631 gS8 = { VK_FORMAT_S8_UINT, 8, 8, false },
egdaniel8f1dcaa2016-04-01 10:10:45 -0700632 gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT, 8, 32, true },
633 gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT, 8, 64, true };
Greg Daniel164a9f02016-02-22 09:56:40 -0500634
egdaniel8f1dcaa2016-04-01 10:10:45 -0700635 if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400636 fPreferredStencilFormat = gS8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700637 } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400638 fPreferredStencilFormat = gD24S8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700639 } else {
640 SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
Ethan Nicholasf610bae2018-09-20 16:55:21 -0400641 fPreferredStencilFormat = gD32S8;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700642 }
643}
644
Greg Danielcaa795f2019-05-14 11:54:25 -0400645static bool format_is_srgb(VkFormat format) {
Robert Phillipsf209e882019-06-25 15:59:50 -0400646 SkASSERT(GrVkFormatIsSupported(format));
647
Greg Danielcaa795f2019-05-14 11:54:25 -0400648 switch (format) {
649 case VK_FORMAT_R8G8B8A8_SRGB:
650 case VK_FORMAT_B8G8R8A8_SRGB:
651 return true;
Greg Danielcaa795f2019-05-14 11:54:25 -0400652 default:
Greg Danielcaa795f2019-05-14 11:54:25 -0400653 return false;
654 }
655}
656
// These are all the valid VkFormats that we support in Skia. They are roughly ordered from most
// frequently used to least to improve look up times in arrays.
// NOTE: the element count must stay equal to GrVkCaps::kNumVkFormats declared in the header, and
// the order defines the index used by fFormatTable in getFormatInfo()/initFormatTable().
static constexpr VkFormat kVkFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_R8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
    VK_FORMAT_R5G6B5_UNORM_PACK16,
    VK_FORMAT_R16G16B16A16_SFLOAT,
    VK_FORMAT_R16_SFLOAT,
    VK_FORMAT_R8G8B8_UNORM,
    VK_FORMAT_R8G8_UNORM,
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    VK_FORMAT_R32G32B32A32_SFLOAT,
    VK_FORMAT_R32G32_SFLOAT,
    VK_FORMAT_R8G8B8A8_SRGB,
    VK_FORMAT_B8G8R8A8_SRGB,
    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    VK_FORMAT_R16_UNORM,
    VK_FORMAT_R16G16_UNORM,
    // Experimental (for Y416 and mutant P016/P010)
    VK_FORMAT_R16G16B16A16_UNORM,
    VK_FORMAT_R16G16_SFLOAT,
};
682
683const GrVkCaps::FormatInfo& GrVkCaps::getFormatInfo(VkFormat format) const {
684 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
685 "Size of VkFormats array must match static value in header");
686 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
687 if (kVkFormats[i] == format) {
688 return fFormatTable[i];
689 }
690 }
691 SK_ABORT("Invalid VkFormat");
Greg Daniel52ee5f62019-06-20 13:38:18 -0400692 static const FormatInfo kInvalidFormat;
693 return kInvalidFormat;
Greg Danielcaa795f2019-05-14 11:54:25 -0400694}
695
696void GrVkCaps::initFormatTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000697 const VkPhysicalDeviceProperties& properties) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400698 static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
699 "Size of VkFormats array must match static value in header");
700 for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
701 VkFormat format = kVkFormats[i];
702 if (!format_is_srgb(format) || fSRGBSupport) {
703 fFormatTable[i].init(interface, physDev, properties, format);
egdaniel8f1dcaa2016-04-01 10:10:45 -0700704 }
705 }
706}
707
Greg Danielcaa795f2019-05-14 11:54:25 -0400708void GrVkCaps::FormatInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700709 if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
710 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
711 *flags = *flags | kTextureable_Flag;
egdaniel8f1dcaa2016-04-01 10:10:45 -0700712
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400713 // Ganesh assumes that all renderable surfaces are also texturable
Greg Danielcaa795f2019-05-14 11:54:25 -0400714 if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
Robert Phillipsb7b7e5f2017-05-22 13:23:19 -0400715 *flags = *flags | kRenderable_Flag;
716 }
egdaniel8f1dcaa2016-04-01 10:10:45 -0700717 }
718
719 if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
720 *flags = *flags | kBlitSrc_Flag;
721 }
722
723 if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
724 *flags = *flags | kBlitDst_Flag;
725 }
726}
727
Greg Danielcaa795f2019-05-14 11:54:25 -0400728void GrVkCaps::FormatInfo::initSampleCounts(const GrVkInterface* interface,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400729 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000730 const VkPhysicalDeviceProperties& physProps,
Greg Daniel81e7bf82017-07-19 14:47:42 -0400731 VkFormat format) {
732 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
733 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
734 VK_IMAGE_USAGE_SAMPLED_BIT |
735 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
Greg Daniel81e7bf82017-07-19 14:47:42 -0400736 VkImageFormatProperties properties;
737 GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
738 format,
739 VK_IMAGE_TYPE_2D,
740 VK_IMAGE_TILING_OPTIMAL,
741 usage,
Brian Osman2b23c4b2018-06-01 12:25:08 -0400742 0, // createFlags
Greg Daniel81e7bf82017-07-19 14:47:42 -0400743 &properties));
744 VkSampleCountFlags flags = properties.sampleCounts;
745 if (flags & VK_SAMPLE_COUNT_1_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400746 fColorSampleCounts.push_back(1);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400747 }
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000748 if (kImagination_VkVendor == physProps.vendorID) {
749 // MSAA does not work on imagination
750 return;
751 }
Greg Daniel81e7bf82017-07-19 14:47:42 -0400752 if (flags & VK_SAMPLE_COUNT_2_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400753 fColorSampleCounts.push_back(2);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400754 }
755 if (flags & VK_SAMPLE_COUNT_4_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400756 fColorSampleCounts.push_back(4);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400757 }
758 if (flags & VK_SAMPLE_COUNT_8_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400759 fColorSampleCounts.push_back(8);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400760 }
761 if (flags & VK_SAMPLE_COUNT_16_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400762 fColorSampleCounts.push_back(16);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400763 }
764 if (flags & VK_SAMPLE_COUNT_32_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400765 fColorSampleCounts.push_back(32);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400766 }
767 if (flags & VK_SAMPLE_COUNT_64_BIT) {
Mike Reedb5475792018-08-08 16:17:42 -0400768 fColorSampleCounts.push_back(64);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400769 }
770}
771
Greg Danielcaa795f2019-05-14 11:54:25 -0400772void GrVkCaps::FormatInfo::init(const GrVkInterface* interface,
egdaniel8f1dcaa2016-04-01 10:10:45 -0700773 VkPhysicalDevice physDev,
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000774 const VkPhysicalDeviceProperties& properties,
Greg Danielcaa795f2019-05-14 11:54:25 -0400775 VkFormat format) {
egdaniel8f1dcaa2016-04-01 10:10:45 -0700776 VkFormatProperties props;
777 memset(&props, 0, sizeof(VkFormatProperties));
778 GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
Greg Danielcaa795f2019-05-14 11:54:25 -0400779 InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
780 InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400781 if (fOptimalFlags & kRenderable_Flag) {
Greg Daniel2bb6ecc2017-07-20 13:11:14 +0000782 this->initSampleCounts(interface, physDev, properties, format);
Greg Daniel81e7bf82017-07-19 14:47:42 -0400783 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500784}
Greg Daniel81e7bf82017-07-19 14:47:42 -0400785
Robert Phillipsf209e882019-06-25 15:59:50 -0400786bool GrVkCaps::isFormatSRGB(const GrBackendFormat& format) const {
787 if (!format.getVkFormat()) {
788 return false;
789 }
790
791 return format_is_srgb(*format.getVkFormat());
792}
793
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400794bool GrVkCaps::isFormatTexturable(SkColorType, const GrBackendFormat& format) const {
795 if (!format.getVkFormat()) {
796 return false;
797 }
798
799 return this->isFormatTexturable(*format.getVkFormat());
800}
801
Robert Phillips39ef2ef2019-05-15 08:45:53 -0400802bool GrVkCaps::isFormatTexturable(VkFormat format) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400803 if (!GrVkFormatIsSupported(format)) {
804 return false;
805 }
806
807 const FormatInfo& info = this->getFormatInfo(format);
808 return SkToBool(FormatInfo::kTextureable_Flag & info.fOptimalFlags);
809}
810
811bool GrVkCaps::isConfigTexturable(GrPixelConfig config) const {
812 VkFormat format;
813 if (!GrPixelConfigToVkFormat(config, &format)) {
814 return false;
815 }
Robert Phillips39ef2ef2019-05-15 08:45:53 -0400816 return this->isFormatTexturable(format);
817}
818
819bool GrVkCaps::isFormatRenderable(VkFormat format) const {
820 return this->maxRenderTargetSampleCount(format) > 0;
Greg Danielcaa795f2019-05-14 11:54:25 -0400821}
822
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400823int GrVkCaps::getRenderTargetSampleCount(int requestedCount,
824 SkColorType, const GrBackendFormat& format) const {
825 if (!format.getVkFormat()) {
826 return 0;
827 }
828
829 return this->getRenderTargetSampleCount(requestedCount, *format.getVkFormat());
830}
831
Brian Salomonbdecacf2018-02-02 20:32:49 -0500832int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400833 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
834 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
835 if (config == kRGB_888X_GrPixelConfig) {
836 return 0;
837 }
838
839 VkFormat format;
840 if (!GrPixelConfigToVkFormat(config, &format)) {
841 return 0;
842 }
843
844 return this->getRenderTargetSampleCount(requestedCount, format);
845}
846
847int GrVkCaps::getRenderTargetSampleCount(int requestedCount, VkFormat format) const {
Brian Salomonbdecacf2018-02-02 20:32:49 -0500848 requestedCount = SkTMax(1, requestedCount);
Greg Danielcaa795f2019-05-14 11:54:25 -0400849
850 const FormatInfo& info = this->getFormatInfo(format);
851
852 int count = info.fColorSampleCounts.count();
Brian Salomonbdecacf2018-02-02 20:32:49 -0500853
854 if (!count) {
Greg Daniel81e7bf82017-07-19 14:47:42 -0400855 return 0;
856 }
857
Brian Salomonbdecacf2018-02-02 20:32:49 -0500858 if (1 == requestedCount) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400859 SkASSERT(info.fColorSampleCounts.count() && info.fColorSampleCounts[0] == 1);
Brian Salomonbdecacf2018-02-02 20:32:49 -0500860 return 1;
861 }
862
Greg Daniel81e7bf82017-07-19 14:47:42 -0400863 for (int i = 0; i < count; ++i) {
Greg Danielcaa795f2019-05-14 11:54:25 -0400864 if (info.fColorSampleCounts[i] >= requestedCount) {
865 return info.fColorSampleCounts[i];
Greg Daniel81e7bf82017-07-19 14:47:42 -0400866 }
867 }
Brian Salomonbdecacf2018-02-02 20:32:49 -0500868 return 0;
869}
870
Robert Phillipsd8f79a22019-06-24 13:25:42 -0400871int GrVkCaps::maxRenderTargetSampleCount(SkColorType, const GrBackendFormat& format) const {
872 if (!format.getVkFormat()) {
873 return 0;
874 }
875
876 return this->maxRenderTargetSampleCount(*format.getVkFormat());
877}
878
Brian Salomonbdecacf2018-02-02 20:32:49 -0500879int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
Greg Danielcaa795f2019-05-14 11:54:25 -0400880 // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
881 // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
882 if (config == kRGB_888X_GrPixelConfig) {
883 return 0;
884 }
885
886 VkFormat format;
887 if (!GrPixelConfigToVkFormat(config, &format)) {
888 return 0;
889 }
890 return this->maxRenderTargetSampleCount(format);
891}
892
893int GrVkCaps::maxRenderTargetSampleCount(VkFormat format) const {
894 const FormatInfo& info = this->getFormatInfo(format);
895
896 const auto& table = info.fColorSampleCounts;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500897 if (!table.count()) {
898 return 0;
899 }
900 return table[table.count() - 1];
Brian Salomond653cac2018-02-01 13:58:00 -0500901}
902
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400903GrCaps::ReadFlags GrVkCaps::surfaceSupportsReadPixels(const GrSurface* surface) const {
904 if (surface->isProtected()) {
905 return kProtected_ReadFlag;
906 }
Greg Daniela51e93c2019-03-25 12:30:45 -0400907 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
908 // We can't directly read from a VkImage that has a ycbcr sampler.
909 if (tex->ycbcrConversionInfo().isValid()) {
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400910 return kRequiresCopy_ReadFlag;
Greg Daniela51e93c2019-03-25 12:30:45 -0400911 }
912 }
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400913 return kSupported_ReadFlag;
Greg Daniela51e93c2019-03-25 12:30:45 -0400914}
915
Brian Salomonc67c31c2018-12-06 10:00:03 -0500916bool GrVkCaps::onSurfaceSupportsWritePixels(const GrSurface* surface) const {
Brian Salomon3d86a192018-02-27 16:46:11 -0500917 if (auto rt = surface->asRenderTarget()) {
Chris Dalton6ce447a2019-06-23 18:07:38 -0600918 return rt->numSamples() <= 1 && SkToBool(surface->asTexture());
Brian Salomon3d86a192018-02-27 16:46:11 -0500919 }
Greg Daniela51e93c2019-03-25 12:30:45 -0400920 // We can't write to a texture that has a ycbcr sampler.
921 if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
922 // We can't directly read from a VkImage that has a ycbcr sampler.
923 if (tex->ycbcrConversionInfo().isValid()) {
924 return false;
925 }
926 }
Brian Salomon3d86a192018-02-27 16:46:11 -0500927 return true;
928}
929
// Maps a (VkFormat, SkColorType) pair to the GrPixelConfig Ganesh uses internally, or
// kUnknown_GrPixelConfig when the combination is not supported. 'hasYcbcrConversion' indicates
// the image requires a VkSamplerYcbcrConversion to sample; that is only legal together with
// VK_FORMAT_UNDEFINED (external images).
static GrPixelConfig validate_image_info(VkFormat format, SkColorType ct, bool hasYcbcrConversion) {
    if (format == VK_FORMAT_UNDEFINED) {
        // If the format is undefined then it is only valid as an external image which requires that
        // we have a valid VkYcbcrConversion.
        if (hasYcbcrConversion) {
            // We don't actually care what the color type or config are since we won't use those
            // values for external textures. However, for read pixels we will draw to a non ycbcr
            // texture of this config so we set RGBA here for that.
            return kRGBA_8888_GrPixelConfig;
        } else {
            return kUnknown_GrPixelConfig;
        }
    }

    if (hasYcbcrConversion) {
        // We only support having a ycbcr conversion for external images.
        return kUnknown_GrPixelConfig;
    }

    // For every color type, only specific VkFormats are an acceptable backing store.
    switch (ct) {
        case kUnknown_SkColorType:
            break;
        case kAlpha_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGB_565_SkColorType:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                return kRGB_565_GrPixelConfig;
            }
            break;
        case kARGB_4444_SkColorType:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format ||
                VK_FORMAT_R4G4B4A4_UNORM_PACK16 == format) {
                return kRGBA_4444_GrPixelConfig;
            }
            break;
        case kRGBA_8888_SkColorType:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGBA_8888_GrPixelConfig;
            } else if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                return kSRGBA_8888_GrPixelConfig;
            }
            break;
        case kRGB_888x_SkColorType:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                return kRGB_888_GrPixelConfig;
            }
            // RGBA8 with the alpha channel ignored is also an acceptable backing for RGB_888x.
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGB_888X_GrPixelConfig;
            }
            break;
        case kBGRA_8888_SkColorType:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                return kBGRA_8888_GrPixelConfig;
            }
            break;
        case kRGBA_1010102_SkColorType:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                return kRGBA_1010102_GrPixelConfig;
            }
            break;
        case kRGB_101010x_SkColorType:
            // No supported Vulkan backing for this color type.
            return kUnknown_GrPixelConfig;
        case kGray_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGBA_F16Norm_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_Clamped_GrPixelConfig;
            }
            break;
        case kRGBA_F16_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_GrPixelConfig;
            }
            break;
        case kRGBA_F32_SkColorType:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                return kRGBA_float_GrPixelConfig;
            }
            break;
    }

    return kUnknown_GrPixelConfig;
}
1019
Brian Salomonf391d0f2018-12-14 09:18:50 -05001020GrPixelConfig GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt,
1021 SkColorType ct) const {
Greg Daniel323fbcf2018-04-10 13:46:30 -04001022 GrVkImageInfo imageInfo;
1023 if (!rt.getVkImageInfo(&imageInfo)) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001024 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001025 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001026 return validate_image_info(imageInfo.fFormat, ct, imageInfo.fYcbcrConversionInfo.isValid());
Robert Phillipsfc711a22018-02-13 17:03:00 -05001027}
1028
Robert Phillipsc046ff02019-07-01 10:34:03 -04001029bool GrVkCaps::areColorTypeAndFormatCompatible(SkColorType ct,
1030 const GrBackendFormat& format) const {
1031 const VkFormat* vkFormat = format.getVkFormat();
1032 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
1033 if (!vkFormat || !ycbcrInfo) {
1034 return false;
1035 }
1036
1037 return kUnknown_GrPixelConfig != validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
1038}
1039
1040
Brian Salomonf391d0f2018-12-14 09:18:50 -05001041GrPixelConfig GrVkCaps::getConfigFromBackendFormat(const GrBackendFormat& format,
1042 SkColorType ct) const {
Robert Phillipsfc711a22018-02-13 17:03:00 -05001043 const VkFormat* vkFormat = format.getVkFormat();
Greg Daniel14c55c22018-12-04 11:25:03 -05001044 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
1045 if (!vkFormat || !ycbcrInfo) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001046 return kUnknown_GrPixelConfig;
Robert Phillipsfc711a22018-02-13 17:03:00 -05001047 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001048 return validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
Greg Danielfaa095e2017-12-19 13:15:02 -05001049}
Greg Danielf5d87582017-12-18 14:48:15 -05001050
// Maps a VkFormat to the GrPixelConfig used for individual YUVA planes; unsupported formats
// yield kUnknown_GrPixelConfig.
static GrPixelConfig get_yuva_config(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_as_Red_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8_UNORM:
            return kRGB_888_GrPixelConfig;
        case VK_FORMAT_R8G8_UNORM:
            return kRG_88_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        case VK_FORMAT_R16_UNORM:
            return kR_16_GrPixelConfig;
        case VK_FORMAT_R16G16_UNORM:
            return kRG_1616_GrPixelConfig;
        // Experimental (for Y416 and mutant P016/P010)
        case VK_FORMAT_R16G16B16A16_UNORM:
            return kRGBA_16161616_GrPixelConfig;
        case VK_FORMAT_R16G16_SFLOAT:
            return kRG_half_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}
1078
Brian Salomonf391d0f2018-12-14 09:18:50 -05001079GrPixelConfig GrVkCaps::getYUVAConfigFromBackendFormat(const GrBackendFormat& format) const {
Jim Van Verth9bf81202018-10-30 15:53:36 -04001080 const VkFormat* vkFormat = format.getVkFormat();
1081 if (!vkFormat) {
Brian Salomonf391d0f2018-12-14 09:18:50 -05001082 return kUnknown_GrPixelConfig;
Jim Van Verth9bf81202018-10-30 15:53:36 -04001083 }
Brian Salomonf391d0f2018-12-14 09:18:50 -05001084 return get_yuva_config(*vkFormat);
Timothy Liang036fdfe2018-06-28 15:50:36 -04001085}
Greg Daniel4065d452018-11-16 15:43:41 -05001086
1087GrBackendFormat GrVkCaps::getBackendFormatFromGrColorType(GrColorType ct,
1088 GrSRGBEncoded srgbEncoded) const {
1089 GrPixelConfig config = GrColorTypeToPixelConfig(ct, srgbEncoded);
1090 if (config == kUnknown_GrPixelConfig) {
1091 return GrBackendFormat();
1092 }
1093 VkFormat format;
1094 if (!GrPixelConfigToVkFormat(config, &format)) {
1095 return GrBackendFormat();
1096 }
1097 return GrBackendFormat::MakeVk(format);
1098}
Timothy Liang036fdfe2018-06-28 15:50:36 -04001099
// Maps a compressed-image type to its Vulkan backend format. The switch intentionally has no
// default so the compiler flags any newly added SkImage::CompressionType.
GrBackendFormat GrVkCaps::getBackendFormatFromCompressionType(
        SkImage::CompressionType compressionType) const {
    switch (compressionType) {
        case SkImage::kETC1_CompressionType:
            // ETC1 data is a subset of ETC2, so the ETC2 RGB format can hold it.
            return GrBackendFormat::MakeVk(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK);
    }
    SK_ABORT("Invalid compression type");
    return {};
}
1109
#ifdef SK_DEBUG
// Debug-only sanity check: true when 'vkFormat' is an acceptable backing store for 'colorType'.
// Used by get_swizzle() to assert the caller paired the two correctly.
static bool format_color_type_valid_pair(VkFormat vkFormat, GrColorType colorType) {
    switch (colorType) {
        case GrColorType::kUnknown:
            return false;
        case GrColorType::kAlpha_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kBGR_565:
            return VK_FORMAT_R5G6B5_UNORM_PACK16 == vkFormat;
        case GrColorType::kABGR_4444:
            return VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat ||
                   VK_FORMAT_R4G4B4A4_UNORM_PACK16 == vkFormat;
        case GrColorType::kRGBA_8888:
            return VK_FORMAT_R8G8B8A8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_SRGB == vkFormat;
        case GrColorType::kRGB_888x:
            // ETC1-compressed data decodes closest to RGB_888x; keep the mapping in sync.
            GR_STATIC_ASSERT(GrCompressionTypeClosestColorType(SkImage::kETC1_CompressionType) ==
                             GrColorType::kRGB_888x);
            return VK_FORMAT_R8G8B8_UNORM == vkFormat || VK_FORMAT_R8G8B8A8_UNORM == vkFormat ||
                   VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK == vkFormat;
        case GrColorType::kRG_88:
            return VK_FORMAT_R8G8_UNORM == vkFormat;
        case GrColorType::kBGRA_8888:
            return VK_FORMAT_B8G8R8A8_UNORM == vkFormat || VK_FORMAT_B8G8R8A8_SRGB == vkFormat;
        case GrColorType::kRGBA_1010102:
            return VK_FORMAT_A2B10G10R10_UNORM_PACK32 == vkFormat;
        case GrColorType::kGray_8:
            return VK_FORMAT_R8_UNORM == vkFormat;
        case GrColorType::kAlpha_F16:
            return VK_FORMAT_R16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F16_Clamped:
            return VK_FORMAT_R16G16B16A16_SFLOAT == vkFormat;
        case GrColorType::kRG_F32:
            return VK_FORMAT_R32G32_SFLOAT == vkFormat;
        case GrColorType::kRGBA_F32:
            return VK_FORMAT_R32G32B32A32_SFLOAT == vkFormat;
        case GrColorType::kR_16:
            return VK_FORMAT_R16_UNORM == vkFormat;
        case GrColorType::kRG_1616:
            return VK_FORMAT_R16G16_UNORM == vkFormat;
        // Experimental (for Y416 and mutant P016/P010)
        case GrColorType::kRGBA_16161616:
            return VK_FORMAT_R16G16B16A16_UNORM == vkFormat;
        case GrColorType::kRG_F16:
            return VK_FORMAT_R16G16_SFLOAT == vkFormat;
    }
    SK_ABORT("Unknown color type");
    return false;
}
#endif
1161
1162static GrSwizzle get_swizzle(const GrBackendFormat& format, GrColorType colorType,
1163 bool forOutput) {
1164 SkASSERT(format.getVkFormat());
1165 VkFormat vkFormat = *format.getVkFormat();
1166
1167 SkASSERT(format_color_type_valid_pair(vkFormat, colorType));
1168
1169 switch (colorType) {
1170 case GrColorType::kAlpha_8: // fall through
1171 case GrColorType::kAlpha_F16:
1172 if (forOutput) {
1173 return GrSwizzle::AAAA();
1174 } else {
1175 return GrSwizzle::RRRR();
1176 }
1177 case GrColorType::kGray_8:
1178 if (!forOutput) {
1179 return GrSwizzle::RRRA();
1180 }
1181 break;
1182 case GrColorType::kABGR_4444:
1183 if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == vkFormat) {
1184 return GrSwizzle::BGRA();
1185 }
1186 break;
1187 case GrColorType::kRGB_888x:
1188 if (!forOutput) {
1189 return GrSwizzle::RGB1();
1190 }
1191 default:
1192 return GrSwizzle::RGBA();
1193 }
1194 return GrSwizzle::RGBA();
1195}
1196
1197GrSwizzle GrVkCaps::getTextureSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
1198 return get_swizzle(format, colorType, false);
1199}
1200GrSwizzle GrVkCaps::getOutputSwizzle(const GrBackendFormat& format, GrColorType colorType) const {
1201 return get_swizzle(format, colorType, true);
1202}
1203
Brian Salomon26de56e2019-04-10 12:14:26 -04001204size_t GrVkCaps::onTransferFromOffsetAlignment(GrColorType bufferColorType) const {
Brian Salomona585fe92019-04-09 14:57:00 -04001205 // This GrColorType has 32 bpp but the Vulkan pixel format we use for with may have 24bpp
1206 // (VK_FORMAT_R8G8B8_...) or may be 32 bpp. We don't support post transforming the pixel data
1207 // for transfer-from currently and don't want to have to pass info about the src surface here.
1208 if (bufferColorType == GrColorType::kRGB_888x) {
1209 return false;
1210 }
1211 size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
1212 // The VkBufferImageCopy bufferOffset field must be both a multiple of 4 and of a single texel.
1213 switch (bpp & 0b11) {
Brian Salomon26de56e2019-04-10 12:14:26 -04001214 // bpp is already a multiple of 4.
1215 case 0: return bpp;
1216 // bpp is a multiple of 2 but not 4.
1217 case 2: return 2 * bpp;
1218 // bpp is not a multiple of 2.
1219 default: return 4 * bpp;
Brian Salomona585fe92019-04-09 14:57:00 -04001220 }
Brian Salomona585fe92019-04-09 14:57:00 -04001221}