/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrRenderTarget.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/vk/GrVkExtensions.h"
#include "include/private/GrRenderTargetProxy.h"
#include "src/gpu/GrShaderCaps.h"
#include "src/gpu/SkGr.h"
#include "src/gpu/vk/GrVkCaps.h"
#include "src/gpu/vk/GrVkInterface.h"
#include "src/gpu/vk/GrVkTexture.h"
#include "src/gpu/vk/GrVkUtil.h"

GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, uint32_t physicalDeviceVersion,
                   const GrVkExtensions& extensions)
        : INHERITED(contextOptions) {

    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;   // always available in Vulkan
    fSRGBSupport = true;   // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fDiscardRenderTargetSupport = true;
    fReuseScratchTextures = true; //TODO: figure this out
    fGpuTracingSupport = false; //TODO: figure this out
    fCompressedTexSubImageSupport = true;
    fOversizedStencilSupport = false; //TODO: figure this out
    fInstanceAttribSupport = true;

    fSemaphoreSupport = true;   // always available in Vulkan
    fFenceSyncSupport = true;   // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;

    fTransferBufferSupport = true;

    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096; // minimum required by spec

    fDynamicStateArrayGeometryProcessorTextureSupport = true;

    fShaderCaps.reset(new GrShaderCaps(contextOptions));

    this->init(contextOptions, vkInterface, physDev, features, physicalDeviceVersion, extensions);
}

bool GrVkCaps::initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
                                  GrSurfaceOrigin* origin, bool* rectsMustMatch,
                                  bool* disallowSubrect) const {
    // Vk doesn't use rectsMustMatch or disallowSubrect. Always return false.
    *rectsMustMatch = false;
    *disallowSubrect = false;

    // We can always succeed here with either a CopyImage (non-msaa src) or ResolveImage (msaa).
    // For CopyImage we can make a simple texture, for ResolveImage we require the dst to be a
    // render target as well.
    *origin = src->origin();
    desc->fConfig = src->config();
    if (src->numColorSamples() > 1 || src->asTextureProxy()) {
        desc->fFlags = kRenderTarget_GrSurfaceFlag;
    } else {
        // Just going to use CopyImage here
        desc->fFlags = kNone_GrSurfaceFlags;
    }

    return true;
}

static int get_compatible_format_class(GrPixelConfig config) {
    switch (config) {
        case kAlpha_8_GrPixelConfig:
        case kAlpha_8_as_Red_GrPixelConfig:
        case kGray_8_GrPixelConfig:
        case kGray_8_as_Red_GrPixelConfig:
            return 1;
        case kRGB_565_GrPixelConfig:
        case kRGBA_4444_GrPixelConfig:
        case kRG_88_GrPixelConfig:
        case kAlpha_half_GrPixelConfig:
        case kAlpha_half_as_Red_GrPixelConfig:
            return 2;
        case kRGB_888_GrPixelConfig:
            return 3;
        case kRGBA_8888_GrPixelConfig:
        case kRGB_888X_GrPixelConfig:
        case kBGRA_8888_GrPixelConfig:
        case kSRGBA_8888_GrPixelConfig:
        case kSBGRA_8888_GrPixelConfig:
        case kRGBA_1010102_GrPixelConfig:
            return 4;
        case kRGBA_half_GrPixelConfig:
        case kRGBA_half_Clamped_GrPixelConfig:
        case kRG_float_GrPixelConfig:
            return 5;
        case kRGBA_float_GrPixelConfig:
            return 6;
        case kRGB_ETC1_GrPixelConfig:
            return 7;
        case kUnknown_GrPixelConfig:
        case kAlpha_8_as_Alpha_GrPixelConfig:
        case kGray_8_as_Lum_GrPixelConfig:
            SK_ABORT("Unsupported Vulkan pixel config");
            return 0;
    }
    SK_ABORT("Invalid pixel config");
    return 0;
}

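// The classes above approximate Vulkan's format "size compatibility" rule: vkCmdCopyImage only
// allows copies between formats with the same bytes-per-texel block size. For example,
// kRGBA_8888 and kBGRA_8888 both land in class 4 (4 bytes per texel) and can be copied to one
// another, while kRGBA_8888 -> kRGB_565 (class 4 vs. class 2) cannot use the copy path.
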
bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, GrSurfaceOrigin dstOrigin,
                            bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
                            GrSurfaceOrigin srcOrigin, bool srcHasYcbcr) const {
    if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
        return false;
    }

    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }

    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (srcOrigin != dstOrigin ||
        get_compatible_format_class(srcConfig) != get_compatible_format_class(dstConfig)) {
        return false;
    }

    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    return true;
}

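// vkCmdCopyImage performs a raw texel copy, so a copy between surfaces with different origins
// would come out vertically flipped; that is why matching origins are required above in addition
// to the format-class check.
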
bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
                             bool dstHasYcbcr, GrPixelConfig srcConfig, int srcSampleCnt,
                             bool srcIsLinear, bool srcHasYcbcr) const {

    VkFormat dstFormat;
    SkAssertResult(GrPixelConfigToVkFormat(dstConfig, &dstFormat));
    VkFormat srcFormat;
    SkAssertResult(GrPixelConfigToVkFormat(srcConfig, &srcFormat));
    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (!this->formatCanBeDstofBlit(dstFormat, dstIsLinear) ||
        !this->formatCanBeSrcofBlit(srcFormat, srcIsLinear)) {
        return false;
    }

    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    // We cannot blit images that are multisampled. Will need to figure out if we can blit the
    // resolved msaa though.
    if (dstSampleCnt > 1 || srcSampleCnt > 1) {
        return false;
    }

    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }

    return true;
}

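// The blit path maps to vkCmdBlitImage, which requires VK_FORMAT_FEATURE_BLIT_SRC_BIT on the
// source format and VK_FORMAT_FEATURE_BLIT_DST_BIT on the destination format for the tiling in
// use. Those feature bits are cached as kBlitSrc_Flag/kBlitDst_Flag in InitConfigFlags() below,
// which is what the formatCanBeSrcofBlit/formatCanBeDstofBlit checks are presumed to consult.
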
bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt,
                                GrSurfaceOrigin dstOrigin, bool dstHasYcbcr,
                                GrPixelConfig srcConfig, int srcSampleCnt,
                                GrSurfaceOrigin srcOrigin, bool srcHasYcbcr) const {
    // The src surface must be multisampled.
    if (srcSampleCnt <= 1) {
        return false;
    }

    // The dst must not be multisampled.
    if (dstSampleCnt > 1) {
        return false;
    }

    // Surfaces must have the same format.
    if (dstConfig != srcConfig) {
        return false;
    }

    // Surfaces must have the same origin.
    if (srcOrigin != dstOrigin) {
        return false;
    }

    if (dstHasYcbcr || srcHasYcbcr) {
        return false;
    }

    return true;
}

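// These restrictions mirror vkCmdResolveImage itself: the spec requires a multisampled source, a
// single-sample destination, and identical formats, so the checks above simply reject anything
// the resolve command could not express.
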
bool GrVkCaps::canCopyAsDraw(GrPixelConfig dstConfig, bool dstIsRenderable, bool dstHasYcbcr,
                             GrPixelConfig srcConfig, bool srcIsTextureable,
                             bool srcHasYcbcr) const {
    // TODO: Make copySurfaceAsDraw handle the swizzle
    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    // Make sure the dst is a render target and the src is a texture.
    if (!dstIsRenderable || !srcIsTextureable) {
        return false;
    }

    if (dstHasYcbcr) {
        return false;
    }

    return true;
}

bool GrVkCaps::onCanCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
                                const SkIRect& srcRect, const SkIPoint& dstPoint) const {
    GrSurfaceOrigin dstOrigin = dst->origin();
    GrSurfaceOrigin srcOrigin = src->origin();

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    // TODO: Figure out a way to track whether we've wrapped a linear texture in a proxy (e.g. a
    // PromiseImage), which won't get instantiated right away. Does this need something similar to
    // the tracking of external or rectangle textures in GL? For now we don't create linear
    // textures internally, and I don't believe anyone is wrapping them.
    bool srcIsLinear = false;
    bool dstIsLinear = false;

    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        dstSampleCnt = rtProxy->numColorSamples();
    }
    if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
        // Copying to or from render targets that wrap a secondary command buffer is not allowed
        // since they would require us to know the VkImage, which we don't have, as well as need us
        // to stop and start the VkRenderPass which we don't have access to.
        if (rtProxy->wrapsVkSecondaryCB()) {
            return false;
        }
        srcSampleCnt = rtProxy->numColorSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));

    bool dstHasYcbcr = false;
    if (auto ycbcr = dst->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            dstHasYcbcr = true;
        }
    }

    bool srcHasYcbcr = false;
    if (auto ycbcr = src->backendFormat().getVkYcbcrConversionInfo()) {
        if (ycbcr->isValid()) {
            srcHasYcbcr = true;
        }
    }

    return this->canCopyImage(dstConfig, dstSampleCnt, dstOrigin, dstHasYcbcr,
                              srcConfig, srcSampleCnt, srcOrigin, srcHasYcbcr) ||
           this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear, dstHasYcbcr,
                               srcConfig, srcSampleCnt, srcIsLinear, srcHasYcbcr) ||
           this->canCopyAsResolve(dstConfig, dstSampleCnt, dstOrigin, dstHasYcbcr,
                                  srcConfig, srcSampleCnt, srcOrigin, srcHasYcbcr) ||
           this->canCopyAsDraw(dstConfig, dstSampleCnt > 0, dstHasYcbcr,
                               srcConfig, SkToBool(src->asTextureProxy()), srcHasYcbcr);
}

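// The || chain above tries the four strategies in the order the helpers are declared: a plain
// image copy, then a blit, then an MSAA resolve, and finally a draw. The copy is reported as
// possible if any one of them can handle the src/dst pair.
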
template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
                                                     VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void* pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}

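// Usage sketch (matching the call sites below): walk the pNext chain of VkPhysicalDeviceFeatures2
// looking for a struct whose sType matches, e.g.
//
//   auto* ycbcr = get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
//           features, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
//
// The helper returns nullptr if the client never chained that extension struct into the features
// it handed to Skia.
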
void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    uint32_t physicalDeviceVersion, const GrVkExtensions& extensions) {
    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));

    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));

    SkASSERT(physicalDeviceVersion <= properties.apiVersion);

    if (extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 1)) {
        fSupportsSwapchain = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices are not supporting the QUEUE_FAMILY_FOREIGN_EXTENSION, so until
    // they do we don't explicitly require it here even though the spec says it is required.
    if (extensions.hasExtension(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
        /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    auto ycbcrFeatures =
            get_extension_feature_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES);
    if (ycbcrFeatures && ycbcrFeatures->samplerYcbcrConversion &&
        fSupportsAndroidHWBExternalMemory &&
        (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
         (extensions.hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1) &&
          this->supportsMaintenance1() &&
          this->supportsBindMemory2() &&
          this->supportsMemoryRequirements2() &&
          this->supportsPhysicalDeviceProperties2()))) {
        fSupportsYcbcrConversion = true;
    }
    // We always push back the default GrVkYcbcrConversionInfo so that the case of no conversion
    // will return a key of 0.
    fYcbcrInfos.push_back(GrVkYcbcrConversionInfo());

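    // The pattern in the checks above: each optional capability is enabled either because the
    // physical device's core API version already includes it (most of these were promoted to core
    // in Vulkan 1.1) or because the corresponding KHR/ANDROID extension, along with the extensions
    // it depends on, was enabled on the device.
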
    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // A "clear" load for the CCPR atlas runs faster on QC than a "discard" load followed by a
        // scissored clear.
        // On NVIDIA and Intel, the discard load followed by clear is faster.
        // TODO: Evaluate on ARM, Imagination, and ATI.
        fPreferFullscreenClears = true;
    }

    if (kQualcomm_VkVendor == properties.vendorID || kARM_VkVendor == properties.vendorID) {
        // On Qualcomm and ARM mapping a gpu buffer and doing both reads and writes to it is slow.
        // Thus for index and vertex buffers we will force the use of a cpu-side buffer and then
        // copy the whole buffer up to the gpu.
        fBufferMapThreshold = SK_MaxS32;
    }

    if (kQualcomm_VkVendor == properties.vendorID) {
        // On Qualcomm it looks like using vkCmdUpdateBuffer is slower than using a transfer buffer
        // even for small sizes.
        fAvoidUpdateBuffers = true;
    }

    if (kARM_VkVendor == properties.vendorID) {
        // ARM seems to do better with more fine triangles as opposed to using the sample mask.
        // (At least in our current round rect op.)
        fPreferTrianglesOverSampleMask = true;
    }

    this->initFormatTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }

    // On the Nexus Player we disable suballocating VkImage memory since we've seen large
    // slowdowns on bot run times.
    if (kImagination_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}

void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
        // Transfer doesn't support this workaround.
        fTransferBufferSupport = false;
    }

#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif

    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // Current workaround is to use a different secondary command buffer for each new VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }

    // On Mali (Galaxy S7) we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
        fAvoidWritePixelsFastPath = true; // bugs.skia.org/8064
    }

    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only supports
    // 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}

int get_max_sample_count(VkSampleCountFlags flags) {
    SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
    if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
        return 0;
    }
    if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
        return 2;
    }
    if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
        return 4;
    }
    if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
        return 8;
    }
    if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
        return 16;
    }
    if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
        return 32;
    }
    return 64;
}

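// For example, a device reporting VK_SAMPLE_COUNT_1_BIT | 2_BIT | 4_BIT yields 4, while a device
// reporting only the mandatory 1-bit yields 0, which callers treat as "no MSAA". The helper
// assumes the supported counts form a contiguous range of powers of two starting at 1, which is
// how implementations report them in practice.
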
void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // Some GPUs, like AMD, report supporting MAX_INT vertex input attributes. In general there is
    // no need for us ever to support that many, and it makes tests which loop over all the vertex
    // attribs time out. For now we'll cap this at 64 and can raise it if we ever find the need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);

    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);

    // TODO: check if RTs larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;

    // Assuming since we will always map in the end to upload the data we might as well just map
    // from the get go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;

    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag | kAsyncRead_MapFlag;

    fOversizedStencilSupport = true;

    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {

        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;

        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;

        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));

        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;

            auto blendFeatures =
                    get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                            features,
                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non coherent blends are not supported in our vulkan backend.
                // They require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}

void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";

    // fConfigOutputSwizzle will default to RGBA so we only need to set it for alpha-only configs.
    for (int i = 0; i < kGrPixelConfigCnt; ++i) {
        GrPixelConfig config = static_cast<GrPixelConfig>(i);
        // Vulkan doesn't support a single channel format stored in alpha.
        if (GrPixelConfigIsAlphaOnly(config) &&
            kAlpha_8_as_Alpha_GrPixelConfig != config) {
            shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRR();
            shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::AAAA();
        } else {
            if (kGray_8_GrPixelConfig == config ||
                kGray_8_as_Red_GrPixelConfig == config) {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRA();
            } else if (kRGBA_4444_GrPixelConfig == config) {
                // The vulkan spec does not require R4G4B4A4 to be supported for texturing so we
                // store the data in a B4G4R4A4 texture and then swizzle it when doing texture reads
                // or writing to outputs. Since we're not actually changing the data at all, the
                // only extra work is the swizzle in the shader for all operations.
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::BGRA();
                shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::BGRA();
            } else if (kRGB_888X_GrPixelConfig == config) {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RGB1();
            } else {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RGBA();
            }
        }
    }

    // Vulkan is based off ES 3.0 so the following should all be supported
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;

    // GrShaderCaps

    shaderCaps->fShaderDerivativeSupport = true;

    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    //         shaderCaps->fGSInvocationsSupport = features.features.geometryShader;

    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;

    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;

    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;

    // SPIR-V supports unsigned integers.
    shaderCaps->fUnsignedSupport = true;

    shaderCaps->fMaxFragmentSamplers = SkTMin(
            SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
                   properties.limits.maxPerStageDescriptorSamplers),
            (uint32_t)INT_MAX);
}

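// A concrete example of the swizzle table above: kAlpha_8_as_Red is backed by VK_FORMAT_R8_UNORM,
// so it is sampled with an RRRR texture swizzle and written through an AAAA output swizzle. The
// shader therefore reads and writes a normal alpha channel even though only the red channel of
// the VkImage is ever stored.
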
bool stencil_format_supported(const GrVkInterface* interface,
                              VkPhysicalDevice physDev,
                              VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
}

void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
    // List of legal stencil formats (though perhaps not supported on
    // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
    // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
    // can optionally have 24 unused bits at the end so we assume the total bits is 64.
    static const StencilFormat
                  // internal Format                 stencil bits   total bits   packed?
        gS8    = { VK_FORMAT_S8_UINT,                8,             8,           false },
        gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT,      8,             32,          true },
        gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT,     8,             64,          true };

    if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
        fPreferredStencilFormat = gS8;
    } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
        fPreferredStencilFormat = gD24S8;
    } else {
        SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
        fPreferredStencilFormat = gD32S8;
    }
}

static bool format_is_srgb(VkFormat format) {
    switch (format) {
        case VK_FORMAT_R8G8B8A8_SRGB:
        case VK_FORMAT_B8G8R8A8_SRGB:
            return true;
        case VK_FORMAT_R8G8B8A8_UNORM:
        case VK_FORMAT_B8G8R8A8_UNORM:
        case VK_FORMAT_R8G8B8A8_SINT:
        case VK_FORMAT_R8G8B8_UNORM:
        case VK_FORMAT_R8G8_UNORM:
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
        case VK_FORMAT_R8_UNORM:
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
        case VK_FORMAT_R32G32B32A32_SFLOAT:
        case VK_FORMAT_R32G32_SFLOAT:
        case VK_FORMAT_R16G16B16A16_SFLOAT:
        case VK_FORMAT_R16_SFLOAT:
            return false;
        default:
            SK_ABORT("Unsupported VkFormat");
            return false;
    }
}

// These are all the valid VkFormats that we support in Skia. They are roughly ordered from most
// frequently used to least to improve look up times in arrays.
static constexpr VkFormat kVkFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_R8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
    VK_FORMAT_R5G6B5_UNORM_PACK16,
    VK_FORMAT_R16G16B16A16_SFLOAT,
    VK_FORMAT_R16_SFLOAT,
    VK_FORMAT_R8G8B8A8_SINT,
    VK_FORMAT_R8G8B8_UNORM,
    VK_FORMAT_R8G8_UNORM,
    VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    VK_FORMAT_R32G32B32A32_SFLOAT,
    VK_FORMAT_R32G32_SFLOAT,
    VK_FORMAT_R8G8B8A8_SRGB,
    VK_FORMAT_B8G8R8A8_SRGB,
    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK
};

const GrVkCaps::FormatInfo& GrVkCaps::getFormatInfo(VkFormat format) const {
    static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
                  "Size of VkFormats array must match static value in header");
    for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
        if (kVkFormats[i] == format) {
            return fFormatTable[i];
        }
    }
    SK_ABORT("Invalid VkFormat");
    static const FormatInfo kInvalidConfig;
    return kInvalidConfig;
}

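// Any format not listed in kVkFormats hits the SK_ABORT above, so adding a new format means
// growing both the array and kNumVkFormats in the header together (the static_assert enforces
// this). The linear scan is fine here since the table only holds 16 entries.
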
void GrVkCaps::initFormatTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
                               const VkPhysicalDeviceProperties& properties) {
    static_assert(SK_ARRAY_COUNT(kVkFormats) == GrVkCaps::kNumVkFormats,
                  "Size of VkFormats array must match static value in header");
    for (size_t i = 0; i < SK_ARRAY_COUNT(kVkFormats); ++i) {
        VkFormat format = kVkFormats[i];
        if (!format_is_srgb(format) || fSRGBSupport) {
            fFormatTable[i].init(interface, physDev, properties, format);
        }
    }
}

void GrVkCaps::FormatInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
    if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
        SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
        *flags = *flags | kTextureable_Flag;

        // Ganesh assumes that all renderable surfaces are also texturable
        if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
            *flags = *flags | kRenderable_Flag;
        }
    }

    if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
        *flags = *flags | kBlitSrc_Flag;
    }

    if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
        *flags = *flags | kBlitDst_Flag;
    }
}

void GrVkCaps::FormatInfo::initSampleCounts(const GrVkInterface* interface,
                                            VkPhysicalDevice physDev,
                                            const VkPhysicalDeviceProperties& physProps,
                                            VkFormat format) {
    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                              VK_IMAGE_USAGE_SAMPLED_BIT |
                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkImageFormatProperties properties;
    GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
                                                                 format,
                                                                 VK_IMAGE_TYPE_2D,
                                                                 VK_IMAGE_TILING_OPTIMAL,
                                                                 usage,
                                                                 0,  // createFlags
                                                                 &properties));
    VkSampleCountFlags flags = properties.sampleCounts;
    if (flags & VK_SAMPLE_COUNT_1_BIT) {
        fColorSampleCounts.push_back(1);
    }
    if (kImagination_VkVendor == physProps.vendorID) {
        // MSAA does not work on imagination
        return;
    }
    if (flags & VK_SAMPLE_COUNT_2_BIT) {
        fColorSampleCounts.push_back(2);
    }
    if (flags & VK_SAMPLE_COUNT_4_BIT) {
        fColorSampleCounts.push_back(4);
    }
    if (flags & VK_SAMPLE_COUNT_8_BIT) {
        fColorSampleCounts.push_back(8);
    }
    if (flags & VK_SAMPLE_COUNT_16_BIT) {
        fColorSampleCounts.push_back(16);
    }
    if (flags & VK_SAMPLE_COUNT_32_BIT) {
        fColorSampleCounts.push_back(32);
    }
    if (flags & VK_SAMPLE_COUNT_64_BIT) {
        fColorSampleCounts.push_back(64);
    }
}

void GrVkCaps::FormatInfo::init(const GrVkInterface* interface,
                                VkPhysicalDevice physDev,
                                const VkPhysicalDeviceProperties& properties,
                                VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
    InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
    if (fOptimalFlags & kRenderable_Flag) {
        this->initSampleCounts(interface, physDev, properties, format);
    }
}

bool GrVkCaps::isConfigTexturable(VkFormat format) const {
    if (!GrVkFormatIsSupported(format)) {
        return false;
    }

    const FormatInfo& info = this->getFormatInfo(format);
    return SkToBool(FormatInfo::kTextureable_Flag & info.fOptimalFlags);
}

bool GrVkCaps::isConfigTexturable(GrPixelConfig config) const {
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return false;
    }
    return this->isConfigTexturable(format);
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
    // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
    // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
    if (config == kRGB_888X_GrPixelConfig) {
        return 0;
    }

    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return 0;
    }

    return this->getRenderTargetSampleCount(requestedCount, format);
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount, VkFormat format) const {
    requestedCount = SkTMax(1, requestedCount);

    const FormatInfo& info = this->getFormatInfo(format);

    int count = info.fColorSampleCounts.count();

    if (!count) {
        return 0;
    }

    if (1 == requestedCount) {
        SkASSERT(info.fColorSampleCounts.count() && info.fColorSampleCounts[0] == 1);
        return 1;
    }

    for (int i = 0; i < count; ++i) {
        if (info.fColorSampleCounts[i] >= requestedCount) {
            return info.fColorSampleCounts[i];
        }
    }
    return 0;
}

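// Because fColorSampleCounts is built in ascending order by initSampleCounts(), the loop above
// returns the smallest supported count that satisfies the request. For example, with counts
// {1, 2, 4, 8} a request for 3 returns 4, a request for 8 returns 8, and a request for 16 returns
// 0, signalling the caller to fall back to fewer samples or no MSAA.
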
int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
    // Currently we don't allow RGB_888X to be renderable because we don't have a way to handle
    // blends that reference dst alpha when the values in the dst alpha channel are uninitialized.
    if (config == kRGB_888X_GrPixelConfig) {
        return 0;
    }

    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return 0;
    }
    return this->maxRenderTargetSampleCount(format);
}

int GrVkCaps::maxRenderTargetSampleCount(VkFormat format) const {
    const FormatInfo& info = this->getFormatInfo(format);

    const auto& table = info.fColorSampleCounts;
    if (!table.count()) {
        return 0;
    }
    return table[table.count() - 1];
}

bool GrVkCaps::surfaceSupportsReadPixels(const GrSurface* surface) const {
    if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
        // We can't directly read from a VkImage that has a ycbcr sampler.
        if (tex->ycbcrConversionInfo().isValid()) {
            return false;
        }
    }
    return true;
}

bool GrVkCaps::onSurfaceSupportsWritePixels(const GrSurface* surface) const {
    if (auto rt = surface->asRenderTarget()) {
        return rt->numColorSamples() <= 1 && SkToBool(surface->asTexture());
    }
    // We can't write to a texture that has a ycbcr sampler.
    if (auto tex = static_cast<const GrVkTexture*>(surface->asTexture())) {
        // We can't directly read from a VkImage that has a ycbcr sampler.
        if (tex->ycbcrConversionInfo().isValid()) {
            return false;
        }
    }
    return true;
}

static GrPixelConfig validate_image_info(VkFormat format, SkColorType ct, bool hasYcbcrConversion) {
    if (format == VK_FORMAT_UNDEFINED) {
        // If the format is undefined then it is only valid as an external image which requires
        // that we have a valid VkYcbcrConversion.
        if (hasYcbcrConversion) {
            // We don't actually care what the color type or config are since we won't use those
            // values for external textures. However, for read pixels we will draw to a non ycbcr
            // texture of this config so we set RGBA here for that.
            return kRGBA_8888_GrPixelConfig;
        } else {
            return kUnknown_GrPixelConfig;
        }
    }

    if (hasYcbcrConversion) {
        // We only support having a ycbcr conversion for external images.
        return kUnknown_GrPixelConfig;
    }

    switch (ct) {
        case kUnknown_SkColorType:
            break;
        case kAlpha_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGB_565_SkColorType:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                return kRGB_565_GrPixelConfig;
            }
            break;
        case kARGB_4444_SkColorType:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format) {
                return kRGBA_4444_GrPixelConfig;
            }
            break;
        case kRGBA_8888_SkColorType:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGBA_8888_GrPixelConfig;
            } else if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                return kSRGBA_8888_GrPixelConfig;
            }
            break;
        case kRGB_888x_SkColorType:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                return kRGB_888_GrPixelConfig;
            }
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                return kRGB_888X_GrPixelConfig;
            }
            break;
        case kBGRA_8888_SkColorType:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                return kBGRA_8888_GrPixelConfig;
            } else if (VK_FORMAT_B8G8R8A8_SRGB == format) {
                return kSBGRA_8888_GrPixelConfig;
            }
            break;
        case kRGBA_1010102_SkColorType:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                return kRGBA_1010102_GrPixelConfig;
            }
            break;
        case kRGB_101010x_SkColorType:
            return kUnknown_GrPixelConfig;
        case kGray_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                return kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGBA_F16Norm_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_Clamped_GrPixelConfig;
            }
            break;
        case kRGBA_F16_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                return kRGBA_half_GrPixelConfig;
            }
            break;
        case kRGBA_F32_SkColorType:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                return kRGBA_float_GrPixelConfig;
            }
            break;
    }

    return kUnknown_GrPixelConfig;
}

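// For example, importing a VkImage created as VK_FORMAT_B8G8R8A8_UNORM with
// kBGRA_8888_SkColorType validates to kBGRA_8888_GrPixelConfig, while a mismatched pairing (say
// VK_FORMAT_R8_UNORM with kRGBA_8888_SkColorType) falls through to kUnknown_GrPixelConfig and the
// callers below treat the wrapped image as having no usable config.
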
GrPixelConfig GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt,
                                                    SkColorType ct) const {
    GrVkImageInfo imageInfo;
    if (!rt.getVkImageInfo(&imageInfo)) {
        return kUnknown_GrPixelConfig;
    }
    return validate_image_info(imageInfo.fFormat, ct, imageInfo.fYcbcrConversionInfo.isValid());
}

GrPixelConfig GrVkCaps::getConfigFromBackendFormat(const GrBackendFormat& format,
                                                   SkColorType ct) const {
    const VkFormat* vkFormat = format.getVkFormat();
    const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
    if (!vkFormat || !ycbcrInfo) {
        return kUnknown_GrPixelConfig;
    }
    return validate_image_info(*vkFormat, ct, ycbcrInfo->isValid());
}

static GrPixelConfig get_yuva_config(VkFormat vkFormat) {
    switch (vkFormat) {
        case VK_FORMAT_R8_UNORM:
            return kAlpha_8_as_Red_GrPixelConfig;
        case VK_FORMAT_R8G8B8A8_UNORM:
            return kRGBA_8888_GrPixelConfig;
        case VK_FORMAT_R8G8B8_UNORM:
            return kRGB_888_GrPixelConfig;
        case VK_FORMAT_R8G8_UNORM:
            return kRG_88_GrPixelConfig;
        case VK_FORMAT_B8G8R8A8_UNORM:
            return kBGRA_8888_GrPixelConfig;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return kRGBA_1010102_GrPixelConfig;
        default:
            return kUnknown_GrPixelConfig;
    }
}

GrPixelConfig GrVkCaps::getYUVAConfigFromBackendFormat(const GrBackendFormat& format) const {
    const VkFormat* vkFormat = format.getVkFormat();
    if (!vkFormat) {
        return kUnknown_GrPixelConfig;
    }
    return get_yuva_config(*vkFormat);
}

GrBackendFormat GrVkCaps::getBackendFormatFromGrColorType(GrColorType ct,
                                                          GrSRGBEncoded srgbEncoded) const {
    GrPixelConfig config = GrColorTypeToPixelConfig(ct, srgbEncoded);
    if (config == kUnknown_GrPixelConfig) {
        return GrBackendFormat();
    }
    VkFormat format;
    if (!GrPixelConfigToVkFormat(config, &format)) {
        return GrBackendFormat();
    }
    return GrBackendFormat::MakeVk(format);
}

size_t GrVkCaps::onTransferFromOffsetAlignment(GrColorType bufferColorType) const {
    // This GrColorType has 32 bpp, but the Vulkan pixel format we use with it may have 24 bpp
    // (VK_FORMAT_R8G8B8_...) or may be 32 bpp. We don't support post transforming the pixel data
    // for transfer-from currently and don't want to have to pass info about the src surface here.
    if (bufferColorType == GrColorType::kRGB_888x) {
        return 0;
    }
    size_t bpp = GrColorTypeBytesPerPixel(bufferColorType);
    // The VkBufferImageCopy bufferOffset field must be both a multiple of 4 and of a single texel.
    switch (bpp & 0b11) {
        // bpp is already a multiple of 4.
        case 0: return bpp;
        // bpp is a multiple of 2 but not 4.
        case 2: return 2 * bpp;
        // bpp is not a multiple of 2.
        default: return 4 * bpp;
    }
}
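
// Worked example of the alignment rule above, assuming GrColorTypeBytesPerPixel returns the usual
// sizes: a 4 bpp type (e.g. RGBA_8888) needs an offset aligned to 4, a 2 bpp type (e.g. RGB_565)
// needs 2 * 2 = 4, a 1 bpp type (e.g. Alpha_8) needs 4 * 1 = 4, and an 8 bpp type (e.g. RGBA_F16)
// needs 8. In every case the result is the least common multiple of the texel size and the 4-byte
// requirement on VkBufferImageCopy::bufferOffset.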