/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkCaps.h"
#include "GrBackendSurface.h"
#include "GrRenderTargetProxy.h"
#include "GrRenderTarget.h"
#include "GrShaderCaps.h"
#include "GrVkInterface.h"
#include "GrVkUtil.h"
#include "vk/GrVkBackendContext.h"
#include "vk/GrVkExtensions.h"

GrVkCaps::GrVkCaps(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                   VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                   uint32_t instanceVersion, const GrVkExtensions& extensions)
        : INHERITED(contextOptions) {

    /**************************************************************************
     * GrCaps fields
     **************************************************************************/
    fMipMapSupport = true;   // always available in Vulkan
    fSRGBSupport = true;   // always available in Vulkan
    fNPOTTextureTileSupport = true;  // always available in Vulkan
    fDiscardRenderTargetSupport = true;
    fReuseScratchTextures = true; //TODO: figure this out
    fGpuTracingSupport = false; //TODO: figure this out
    fOversizedStencilSupport = false; //TODO: figure this out
    fInstanceAttribSupport = true;

    fFenceSyncSupport = true;   // always available in Vulkan
    fCrossContextTextureSupport = true;
    fHalfFloatVertexAttributeSupport = true;

    fMapBufferFlags = kNone_MapFlags; //TODO: figure this out
    fBufferMapThreshold = SK_MaxS32;  //TODO: figure this out

    fMaxRenderTargetSize = 4096; // minimum required by spec
    fMaxTextureSize = 4096; // minimum required by spec

    fDynamicStateArrayGeometryProcessorTextureSupport = true;

    fShaderCaps.reset(new GrShaderCaps(contextOptions));

    this->init(contextOptions, vkInterface, physDev, features, extensions);
}

bool GrVkCaps::initDescForDstCopy(const GrRenderTargetProxy* src, GrSurfaceDesc* desc,
                                  GrSurfaceOrigin* origin, bool* rectsMustMatch,
                                  bool* disallowSubrect) const {
    // Vk doesn't use rectsMustMatch or disallowSubrect. Always return false.
    *rectsMustMatch = false;
    *disallowSubrect = false;

    // We can always succeed here with either a CopyImage (non-msaa src) or ResolveImage (msaa).
    // For CopyImage we can make a simple texture, for ResolveImage we require the dst to be a
    // render target as well.
    *origin = src->origin();
    desc->fConfig = src->config();
    if (src->numColorSamples() > 1 || src->asTextureProxy()) {
        desc->fFlags = kRenderTarget_GrSurfaceFlag;
    } else {
        // Just going to use CopyImage here
        desc->fFlags = kNone_GrSurfaceFlags;
    }

    return true;
}

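// The canCopy* helpers below check, for one copy strategy each, whether a copy between surfaces
// with the given configs, sample counts, and origins is legal. canCopySurface() then tries them
// in order: roughly vkCmdCopyImage, vkCmdBlitImage, vkCmdResolveImage, and finally a draw. For
// example: copying a 4-sample render target into a non-multisampled texture of the same config
// and origin fails canCopyImage (mismatched sample counts) and canCopyAsBlit (multisampled src)
// but succeeds via canCopyAsResolve.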
bool GrVkCaps::canCopyImage(GrPixelConfig dstConfig, int dstSampleCnt, GrSurfaceOrigin dstOrigin,
                            GrPixelConfig srcConfig, int srcSampleCnt,
                            GrSurfaceOrigin srcOrigin) const {
    if ((dstSampleCnt > 1 || srcSampleCnt > 1) && dstSampleCnt != srcSampleCnt) {
        return false;
    }

    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (srcOrigin != dstOrigin || GrBytesPerPixel(srcConfig) != GrBytesPerPixel(dstConfig)) {
        return false;
    }

    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    return true;
}

bool GrVkCaps::canCopyAsBlit(GrPixelConfig dstConfig, int dstSampleCnt, bool dstIsLinear,
                             GrPixelConfig srcConfig, int srcSampleCnt, bool srcIsLinear) const {
    // We require that all vulkan GrSurfaces have been created with transfer_dst and transfer_src
    // as image usage flags.
    if (!this->configCanBeDstofBlit(dstConfig, dstIsLinear) ||
        !this->configCanBeSrcofBlit(srcConfig, srcIsLinear)) {
        return false;
    }

    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    // We cannot blit images that are multisampled. Will need to figure out if we can blit the
    // resolved msaa though.
    if (dstSampleCnt > 1 || srcSampleCnt > 1) {
        return false;
    }

    return true;
}

bool GrVkCaps::canCopyAsResolve(GrPixelConfig dstConfig, int dstSampleCnt,
                                GrSurfaceOrigin dstOrigin, GrPixelConfig srcConfig,
                                int srcSampleCnt, GrSurfaceOrigin srcOrigin) const {
    // The src surface must be multisampled.
    if (srcSampleCnt <= 1) {
        return false;
    }

    // The dst must not be multisampled.
    if (dstSampleCnt > 1) {
        return false;
    }

    // Surfaces must have the same format.
    if (dstConfig != srcConfig) {
        return false;
    }

    // Surfaces must have the same origin.
    if (srcOrigin != dstOrigin) {
        return false;
    }

    return true;
}

bool GrVkCaps::canCopyAsDraw(GrPixelConfig dstConfig, bool dstIsRenderable,
                             GrPixelConfig srcConfig, bool srcIsTextureable) const {
    // TODO: Make copySurfaceAsDraw handle the swizzle
    if (this->shaderCaps()->configOutputSwizzle(srcConfig) !=
        this->shaderCaps()->configOutputSwizzle(dstConfig)) {
        return false;
    }

    // Make sure the dst is a render target and the src is a texture.
    if (!dstIsRenderable || !srcIsTextureable) {
        return false;
    }

    return true;
}

bool GrVkCaps::canCopySurface(const GrSurfaceProxy* dst, const GrSurfaceProxy* src,
                              const SkIRect& srcRect, const SkIPoint& dstPoint) const {
    GrSurfaceOrigin dstOrigin = dst->origin();
    GrSurfaceOrigin srcOrigin = src->origin();

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    // TODO: Figure out a way to track whether we've wrapped a linear texture in a proxy (e.g. a
    // PromiseImage) that won't get instantiated right away. Do we need something similar to the
    // tracking of external and rectangle textures in GL? For now we don't create linear textures
    // internally, and we don't believe anyone is wrapping them.
    bool srcIsLinear = false;
    bool dstIsLinear = false;

    int dstSampleCnt = 0;
    int srcSampleCnt = 0;
    if (const GrRenderTargetProxy* rtProxy = dst->asRenderTargetProxy()) {
        dstSampleCnt = rtProxy->numColorSamples();
    }
    if (const GrRenderTargetProxy* rtProxy = src->asRenderTargetProxy()) {
        srcSampleCnt = rtProxy->numColorSamples();
    }
    SkASSERT((dstSampleCnt > 0) == SkToBool(dst->asRenderTargetProxy()));
    SkASSERT((srcSampleCnt > 0) == SkToBool(src->asRenderTargetProxy()));

    return this->canCopyImage(dstConfig, dstSampleCnt, dstOrigin,
                              srcConfig, srcSampleCnt, srcOrigin) ||
           this->canCopyAsBlit(dstConfig, dstSampleCnt, dstIsLinear,
                               srcConfig, srcSampleCnt, srcIsLinear) ||
           this->canCopyAsResolve(dstConfig, dstSampleCnt, dstOrigin,
                                  srcConfig, srcSampleCnt, srcOrigin) ||
           this->canCopyAsDraw(dstConfig, dstSampleCnt > 0,
                               srcConfig, SkToBool(src->asTextureProxy()));
}

void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface* vkInterface,
                    VkPhysicalDevice physDev, const VkPhysicalDeviceFeatures2& features,
                    const GrVkExtensions& extensions) {

    VkPhysicalDeviceProperties properties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties(physDev, &properties));

    VkPhysicalDeviceMemoryProperties memoryProperties;
    GR_VK_CALL(vkInterface, GetPhysicalDeviceMemoryProperties(physDev, &memoryProperties));

    uint32_t physicalDeviceVersion = properties.apiVersion;

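    // Note: Vulkan packs an API version as (major << 22) | (minor << 12) | patch, so the
    // VK_MAKE_VERSION(1, 1, 0) comparisons below simply ask whether the device reports API
    // version 1.1 or newer, in which case the corresponding KHR extensions were promoted to core
    // and do not need to be enabled explicitly.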
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, 1)) {
        fSupportsPhysicalDeviceProperties2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, 1)) {
        fSupportsMemoryRequirements2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
        fSupportsBindMemory2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
        fSupportsMaintenance1 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, 1)) {
        fSupportsMaintenance2 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        extensions.hasExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, 1)) {
        fSupportsMaintenance3 = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
         this->supportsMemoryRequirements2())) {
        fSupportsDedicatedAllocation = true;
    }

    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
        (extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, 1) &&
         this->supportsPhysicalDeviceProperties2() &&
         extensions.hasExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, 1) &&
         this->supportsDedicatedAllocation())) {
        fSupportsExternalMemory = true;
    }

#ifdef SK_BUILD_FOR_ANDROID
    // Currently Adreno devices do not support the QUEUE_FAMILY_FOREIGN extension, so until they
    // do we don't explicitly require it here, even though the spec says it is required.
    if (extensions.hasExtension(
                VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
        /* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
        this->supportsExternalMemory() &&
        this->supportsBindMemory2()) {
        fSupportsAndroidHWBExternalMemory = true;
        fSupportsAHardwareBufferImages = true;
    }
#endif

    this->initGrCaps(vkInterface, physDev, properties, memoryProperties, features, extensions);
    this->initShaderCaps(properties, features);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
#if defined(SK_CPU_X86)
        // We need to do this before initing the config table since it uses fSRGBSupport
        if (kImagination_VkVendor == properties.vendorID) {
            fSRGBSupport = false;
        }
#endif
    }

    this->initConfigTable(vkInterface, physDev, properties);
    this->initStencilFormat(vkInterface, physDev);

    if (!contextOptions.fDisableDriverCorrectnessWorkarounds) {
        this->applyDriverCorrectnessWorkarounds(properties);
    }

    // On the Nexus Player we disable suballocating VkImage memory since we've seen large
    // slowdowns in bot run times.
    if (kImagination_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    this->applyOptionsOverrides(contextOptions);
    fShaderCaps->applyOptionsOverrides(contextOptions);
}

void GrVkCaps::applyDriverCorrectnessWorkarounds(const VkPhysicalDeviceProperties& properties) {
    if (kQualcomm_VkVendor == properties.vendorID) {
        fMustDoCopiesFromOrigin = true;
    }

    if (kNvidia_VkVendor == properties.vendorID) {
        fMustSubmitCommandsBeforeCopyOp = true;
    }

#if defined(SK_BUILD_FOR_WIN)
    if (kNvidia_VkVendor == properties.vendorID || kIntel_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#elif defined(SK_BUILD_FOR_ANDROID)
    if (kImagination_VkVendor == properties.vendorID) {
        fMustSleepOnTearDown = true;
    }
#endif

    // AMD seems to have issues binding new VkPipelines inside a secondary command buffer.
    // The current workaround is to use a different secondary command buffer for each new
    // VkPipeline.
    if (kAMD_VkVendor == properties.vendorID) {
        fNewCBOnPipelineChange = true;
    }

    // On the Galaxy S7 (Mali) we see lots of rendering issues when we suballocate VkImages.
    if (kARM_VkVendor == properties.vendorID) {
        fShouldAlwaysUseDedicatedImageMemory = true;
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kARM_VkVendor == properties.vendorID) {
        fInstanceAttribSupport = false;
    }

    // AMD advertises support for MAX_UINT vertex input attributes, but in reality only
    // supports 32.
    if (kAMD_VkVendor == properties.vendorID) {
        fMaxVertexAttributes = SkTMin(fMaxVertexAttributes, 32);
    }

    ////////////////////////////////////////////////////////////////////////////
    // GrShaderCaps workarounds
    ////////////////////////////////////////////////////////////////////////////

    if (kImagination_VkVendor == properties.vendorID) {
        fShaderCaps->fAtan2ImplementedAsAtanYOverX = true;
    }
}

int get_max_sample_count(VkSampleCountFlags flags) {
    SkASSERT(flags & VK_SAMPLE_COUNT_1_BIT);
    if (!(flags & VK_SAMPLE_COUNT_2_BIT)) {
        return 0;
    }
    if (!(flags & VK_SAMPLE_COUNT_4_BIT)) {
        return 2;
    }
    if (!(flags & VK_SAMPLE_COUNT_8_BIT)) {
        return 4;
    }
    if (!(flags & VK_SAMPLE_COUNT_16_BIT)) {
        return 8;
    }
    if (!(flags & VK_SAMPLE_COUNT_32_BIT)) {
        return 16;
    }
    if (!(flags & VK_SAMPLE_COUNT_64_BIT)) {
        return 32;
    }
    return 64;
}
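
// For example: a flags value of VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_2_BIT |
// VK_SAMPLE_COUNT_4_BIT makes get_max_sample_count() return 4. The walk stops at the first
// missing bit, so any higher counts advertised above a gap in the flags are ignored.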

template<typename T> T* get_extension_feature_struct(const VkPhysicalDeviceFeatures2& features,
                                                     VkStructureType type) {
    // All Vulkan structs that could be part of the features chain will start with the
    // structure type followed by the pNext pointer. We cast to the CommonVulkanHeader
    // so we can get access to the pNext for the next struct.
    struct CommonVulkanHeader {
        VkStructureType sType;
        void*           pNext;
    };

    void* pNext = features.pNext;
    while (pNext) {
        CommonVulkanHeader* header = static_cast<CommonVulkanHeader*>(pNext);
        if (header->sType == type) {
            return static_cast<T*>(pNext);
        }
        pNext = header->pNext;
    }
    return nullptr;
}
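
// Illustrative use (mirroring the call in initGrCaps below): to pull one extension's feature
// struct out of the pNext chain hanging off a VkPhysicalDeviceFeatures2, e.g.
//
//   auto* blend =
//           get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
//                   features,
//                   VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
//
// The helper returns nullptr if the caller never chained that struct into the features it passed.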

void GrVkCaps::initGrCaps(const GrVkInterface* vkInterface,
                          VkPhysicalDevice physDev,
                          const VkPhysicalDeviceProperties& properties,
                          const VkPhysicalDeviceMemoryProperties& memoryProperties,
                          const VkPhysicalDeviceFeatures2& features,
                          const GrVkExtensions& extensions) {
    // Some GPUs, like AMD's, report MAX_INT supported vertex attributes. In general there is no
    // need for us to ever support that many, and it makes tests that loop over all the vertex
    // attribs time out. For now we'll cap this at 64 and can raise it if we ever find the need.
    static const uint32_t kMaxVertexAttributes = 64;
    fMaxVertexAttributes = SkTMin(properties.limits.maxVertexInputAttributes, kMaxVertexAttributes);

    // We could actually query and get a max size for each config, however maxImageDimension2D will
    // give the minimum max size across all configs. So for simplicity we will use that for now.
    fMaxRenderTargetSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    fMaxTextureSize = SkTMin(properties.limits.maxImageDimension2D, (uint32_t)INT_MAX);
    if (fDriverBugWorkarounds.max_texture_size_limit_4096) {
        fMaxTextureSize = SkTMin(fMaxTextureSize, 4096);
    }
    // Our render targets are always created with textures as the color
    // attachment, hence this min:
    fMaxRenderTargetSize = SkTMin(fMaxTextureSize, fMaxRenderTargetSize);

    // TODO: check if RTs larger than 4k incur a performance cost on ARM.
    fMaxPreferredRenderTargetSize = fMaxRenderTargetSize;

    // Assuming since we will always map in the end to upload the data we might as well just map
    // from the get-go. There is no hard data to suggest this is faster or slower.
    fBufferMapThreshold = 0;

    fMapBufferFlags = kCanMap_MapFlag | kSubset_MapFlag;

    fOversizedStencilSupport = true;
    fSampleShadingSupport = features.features.sampleRateShading;

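    // The advanced blend query below chains the EXT property struct onto
    // VkPhysicalDeviceProperties2 via pNext before calling GetPhysicalDeviceProperties2, and then
    // pulls the matching feature struct out of the caller-supplied features chain to decide
    // between coherent advanced blending and (currently unsupported) non-coherent blending.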
    if (extensions.hasExtension(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, 2) &&
        this->supportsPhysicalDeviceProperties2()) {

        VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
        blendProps.sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
        blendProps.pNext = nullptr;

        VkPhysicalDeviceProperties2 props;
        props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        props.pNext = &blendProps;

        GR_VK_CALL(vkInterface, GetPhysicalDeviceProperties2(physDev, &props));

        if (blendProps.advancedBlendAllOperations == VK_TRUE) {
            fShaderCaps->fAdvBlendEqInteraction = GrShaderCaps::kAutomatic_AdvBlendEqInteraction;

            auto blendFeatures =
                get_extension_feature_struct<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(
                    features,
                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT);
            if (blendFeatures && blendFeatures->advancedBlendCoherentOperations == VK_TRUE) {
                fBlendEquationSupport = kAdvancedCoherent_BlendEquationSupport;
            } else {
                // TODO: Currently non-coherent blends are not supported in our Vulkan backend.
                // They require us to support self dependencies in our render passes.
                // fBlendEquationSupport = kAdvanced_BlendEquationSupport;
            }
        }
    }
}

void GrVkCaps::initShaderCaps(const VkPhysicalDeviceProperties& properties,
                              const VkPhysicalDeviceFeatures2& features) {
    GrShaderCaps* shaderCaps = fShaderCaps.get();
    shaderCaps->fVersionDeclString = "#version 330\n";

    // fConfigOutputSwizzle will default to RGBA so we only need to set it for alpha-only configs.
    for (int i = 0; i < kGrPixelConfigCnt; ++i) {
        GrPixelConfig config = static_cast<GrPixelConfig>(i);
        // Vulkan doesn't support a single channel format stored in alpha.
        if (GrPixelConfigIsAlphaOnly(config) &&
            kAlpha_8_as_Alpha_GrPixelConfig != config) {
            shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRR();
            shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::AAAA();
        } else {
            if (kGray_8_GrPixelConfig == config ||
                kGray_8_as_Red_GrPixelConfig == config) {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RRRA();
            } else if (kRGBA_4444_GrPixelConfig == config) {
                // The vulkan spec does not require R4G4B4A4 to be supported for texturing so we
                // store the data in a B4G4R4A4 texture and then swizzle it when doing texture
                // reads or writing to outputs. Since we're not actually changing the data at all,
                // the only extra work is the swizzle in the shader for all operations.
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::BGRA();
                shaderCaps->fConfigOutputSwizzle[i] = GrSwizzle::BGRA();
            } else {
                shaderCaps->fConfigTextureSwizzle[i] = GrSwizzle::RGBA();
            }
        }
    }

    // Vulkan is based off ES 3.0 so the following should all be supported.
    shaderCaps->fUsesPrecisionModifiers = true;
    shaderCaps->fFlatInterpolationSupport = true;
    // Flat interpolation appears to be slow on Qualcomm GPUs. This was tested in GL and is assumed
    // to be true with Vulkan as well.
    shaderCaps->fPreferFlatInterpolation = kQualcomm_VkVendor != properties.vendorID;

    // GrShaderCaps

    shaderCaps->fShaderDerivativeSupport = true;

    // FIXME: http://skbug.com/7733: Disable geometry shaders until Intel/Radeon GMs draw correctly.
    // shaderCaps->fGeometryShaderSupport =
    //         shaderCaps->fGSInvocationsSupport = features.features.geometryShader;

    shaderCaps->fDualSourceBlendingSupport = features.features.dualSrcBlend;

    shaderCaps->fIntegerSupport = true;
    shaderCaps->fVertexIDSupport = true;
    shaderCaps->fFPManipulationSupport = true;

    // Assume the minimum precisions mandated by the SPIR-V spec.
    shaderCaps->fFloatIs32Bits = true;
    shaderCaps->fHalfIs32Bits = false;

    // SPIR-V supports unsigned integers.
    shaderCaps->fUnsignedSupport = true;

    shaderCaps->fMaxVertexSamplers =
    shaderCaps->fMaxGeometrySamplers =
    shaderCaps->fMaxFragmentSamplers = SkTMin(
            SkTMin(properties.limits.maxPerStageDescriptorSampledImages,
                   properties.limits.maxPerStageDescriptorSamplers),
            (uint32_t)INT_MAX);
    shaderCaps->fMaxCombinedSamplers = SkTMin(
            SkTMin(properties.limits.maxDescriptorSetSampledImages,
                   properties.limits.maxDescriptorSetSamplers),
            (uint32_t)INT_MAX);
}

bool stencil_format_supported(const GrVkInterface* interface,
                              VkPhysicalDevice physDev,
                              VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    return SkToBool(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT & props.optimalTilingFeatures);
}

void GrVkCaps::initStencilFormat(const GrVkInterface* interface, VkPhysicalDevice physDev) {
    // List of legal stencil formats (though perhaps not supported on
    // the particular gpu/driver) from most preferred to least. We are guaranteed to have either
    // VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT. VK_FORMAT_D32_SFLOAT_S8_UINT
    // can optionally have 24 unused bits at the end so we assume the total bits is 64.
    static const StencilFormat
                  // internal Format                   stencil bits   total bits   packed?
        gS8    = { VK_FORMAT_S8_UINT,                       8,             8,       false },
        gD24S8 = { VK_FORMAT_D24_UNORM_S8_UINT,             8,            32,       true  },
        gD32S8 = { VK_FORMAT_D32_SFLOAT_S8_UINT,            8,            64,       true  };

    if (stencil_format_supported(interface, physDev, VK_FORMAT_S8_UINT)) {
        fPreferredStencilFormat = gS8;
    } else if (stencil_format_supported(interface, physDev, VK_FORMAT_D24_UNORM_S8_UINT)) {
        fPreferredStencilFormat = gD24S8;
    } else {
        SkASSERT(stencil_format_supported(interface, physDev, VK_FORMAT_D32_SFLOAT_S8_UINT));
        fPreferredStencilFormat = gD32S8;
    }
}

void GrVkCaps::initConfigTable(const GrVkInterface* interface, VkPhysicalDevice physDev,
                               const VkPhysicalDeviceProperties& properties) {
    for (int i = 0; i < kGrPixelConfigCnt; ++i) {
        VkFormat format;
        if (GrPixelConfigToVkFormat(static_cast<GrPixelConfig>(i), &format)) {
            if (!GrPixelConfigIsSRGB(static_cast<GrPixelConfig>(i)) || fSRGBSupport) {
                fConfigTable[i].init(interface, physDev, properties, format);
            }
        }
    }
}

void GrVkCaps::ConfigInfo::InitConfigFlags(VkFormatFeatureFlags vkFlags, uint16_t* flags) {
    if (SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & vkFlags) &&
        SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT & vkFlags)) {
        *flags = *flags | kTextureable_Flag;

        // Ganesh assumes that all renderable surfaces are also texturable
        if (SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT & vkFlags)) {
            *flags = *flags | kRenderable_Flag;
        }
    }

    if (SkToBool(VK_FORMAT_FEATURE_BLIT_SRC_BIT & vkFlags)) {
        *flags = *flags | kBlitSrc_Flag;
    }

    if (SkToBool(VK_FORMAT_FEATURE_BLIT_DST_BIT & vkFlags)) {
        *flags = *flags | kBlitDst_Flag;
    }
}
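
// For example: a format whose VkFormatFeatureFlags include SAMPLED_IMAGE,
// SAMPLED_IMAGE_FILTER_LINEAR, and COLOR_ATTACHMENT_BLEND comes out of InitConfigFlags with both
// kTextureable_Flag and kRenderable_Flag set, while a format that is color-attachment capable but
// not sampleable gets neither, since Ganesh requires renderable surfaces to also be texturable.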

void GrVkCaps::ConfigInfo::initSampleCounts(const GrVkInterface* interface,
                                            VkPhysicalDevice physDev,
                                            const VkPhysicalDeviceProperties& physProps,
                                            VkFormat format) {
    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                              VK_IMAGE_USAGE_SAMPLED_BIT |
                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    VkImageFormatProperties properties;
    GR_VK_CALL(interface, GetPhysicalDeviceImageFormatProperties(physDev,
                                                                 format,
                                                                 VK_IMAGE_TYPE_2D,
                                                                 VK_IMAGE_TILING_OPTIMAL,
                                                                 usage,
                                                                 0,  // createFlags
                                                                 &properties));
    VkSampleCountFlags flags = properties.sampleCounts;
    if (flags & VK_SAMPLE_COUNT_1_BIT) {
        fColorSampleCounts.push_back(1);
    }
    if (kImagination_VkVendor == physProps.vendorID) {
        // MSAA does not work on imagination
        return;
    }
    if (flags & VK_SAMPLE_COUNT_2_BIT) {
        fColorSampleCounts.push_back(2);
    }
    if (flags & VK_SAMPLE_COUNT_4_BIT) {
        fColorSampleCounts.push_back(4);
    }
    if (flags & VK_SAMPLE_COUNT_8_BIT) {
        fColorSampleCounts.push_back(8);
    }
    if (flags & VK_SAMPLE_COUNT_16_BIT) {
        fColorSampleCounts.push_back(16);
    }
    if (flags & VK_SAMPLE_COUNT_32_BIT) {
        fColorSampleCounts.push_back(32);
    }
    if (flags & VK_SAMPLE_COUNT_64_BIT) {
        fColorSampleCounts.push_back(64);
    }
}
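
// Note: initSampleCounts() appends the supported counts in increasing order (1, 2, 4, ...),
// which getRenderTargetSampleCount() below relies on when it returns the first entry that
// satisfies the request.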

void GrVkCaps::ConfigInfo::init(const GrVkInterface* interface,
                                VkPhysicalDevice physDev,
                                const VkPhysicalDeviceProperties& properties,
                                VkFormat format) {
    VkFormatProperties props;
    memset(&props, 0, sizeof(VkFormatProperties));
    GR_VK_CALL(interface, GetPhysicalDeviceFormatProperties(physDev, format, &props));
    InitConfigFlags(props.linearTilingFeatures, &fLinearFlags);
    InitConfigFlags(props.optimalTilingFeatures, &fOptimalFlags);
    if (fOptimalFlags & kRenderable_Flag) {
        this->initSampleCounts(interface, physDev, properties, format);
    }
}

int GrVkCaps::getRenderTargetSampleCount(int requestedCount, GrPixelConfig config) const {
    requestedCount = SkTMax(1, requestedCount);
    int count = fConfigTable[config].fColorSampleCounts.count();

    if (!count) {
        return 0;
    }

    if (1 == requestedCount) {
        SkASSERT(fConfigTable[config].fColorSampleCounts.count() &&
                 fConfigTable[config].fColorSampleCounts[0] == 1);
        return 1;
    }

    for (int i = 0; i < count; ++i) {
        if (fConfigTable[config].fColorSampleCounts[i] >= requestedCount) {
            return fConfigTable[config].fColorSampleCounts[i];
        }
    }
    return 0;
}
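
// For example: with fColorSampleCounts = {1, 2, 4, 8}, a requestedCount of 3 rounds up to 4, a
// requestedCount of 1 returns 1, and a requestedCount of 16 returns 0 because no supported count
// satisfies it.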

int GrVkCaps::maxRenderTargetSampleCount(GrPixelConfig config) const {
    const auto& table = fConfigTable[config].fColorSampleCounts;
    if (!table.count()) {
        return 0;
    }
    return table[table.count() - 1];
}

bool GrVkCaps::surfaceSupportsWritePixels(const GrSurface* surface) const {
    if (auto rt = surface->asRenderTarget()) {
        return rt->numColorSamples() <= 1 && SkToBool(surface->asTexture());
    }
    return true;
}

bool validate_image_info(VkFormat format, SkColorType ct, GrPixelConfig* config) {
    *config = kUnknown_GrPixelConfig;

    switch (ct) {
        case kUnknown_SkColorType:
            return false;
        case kAlpha_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                *config = kAlpha_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGB_565_SkColorType:
            if (VK_FORMAT_R5G6B5_UNORM_PACK16 == format) {
                *config = kRGB_565_GrPixelConfig;
            }
            break;
        case kARGB_4444_SkColorType:
            if (VK_FORMAT_B4G4R4A4_UNORM_PACK16 == format) {
                *config = kRGBA_4444_GrPixelConfig;
            }
            break;
        case kRGBA_8888_SkColorType:
            if (VK_FORMAT_R8G8B8A8_UNORM == format) {
                *config = kRGBA_8888_GrPixelConfig;
            } else if (VK_FORMAT_R8G8B8A8_SRGB == format) {
                *config = kSRGBA_8888_GrPixelConfig;
            }
            break;
        case kRGB_888x_SkColorType:
            if (VK_FORMAT_R8G8B8_UNORM == format) {
                *config = kRGB_888_GrPixelConfig;
            }
            break;
        case kBGRA_8888_SkColorType:
            if (VK_FORMAT_B8G8R8A8_UNORM == format) {
                *config = kBGRA_8888_GrPixelConfig;
            } else if (VK_FORMAT_B8G8R8A8_SRGB == format) {
                *config = kSBGRA_8888_GrPixelConfig;
            }
            break;
        case kRGBA_1010102_SkColorType:
            if (VK_FORMAT_A2B10G10R10_UNORM_PACK32 == format) {
                *config = kRGBA_1010102_GrPixelConfig;
            }
            break;
        case kRGB_101010x_SkColorType:
            return false;
        case kGray_8_SkColorType:
            if (VK_FORMAT_R8_UNORM == format) {
                *config = kGray_8_as_Red_GrPixelConfig;
            }
            break;
        case kRGBA_F16_SkColorType:
            if (VK_FORMAT_R16G16B16A16_SFLOAT == format) {
                *config = kRGBA_half_GrPixelConfig;
            }
            break;
        case kRGBA_F32_SkColorType:
            if (VK_FORMAT_R32G32B32A32_SFLOAT == format) {
                *config = kRGBA_float_GrPixelConfig;
            }
            break;
    }

    return kUnknown_GrPixelConfig != *config;
}
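
// validate_image_info() only accepts the format/color-type pairings enumerated above. For
// example: kRGBA_8888_SkColorType paired with VK_FORMAT_B8G8R8A8_UNORM leaves *config as
// kUnknown_GrPixelConfig and returns false, while the same color type paired with
// VK_FORMAT_R8G8B8A8_UNORM yields kRGBA_8888_GrPixelConfig.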

bool GrVkCaps::validateBackendTexture(const GrBackendTexture& tex, SkColorType ct,
                                      GrPixelConfig* config) const {
    GrVkImageInfo imageInfo;
    if (!tex.getVkImageInfo(&imageInfo)) {
        return false;
    }

    return validate_image_info(imageInfo.fFormat, ct, config);
}

bool GrVkCaps::validateBackendRenderTarget(const GrBackendRenderTarget& rt, SkColorType ct,
                                           GrPixelConfig* config) const {
    GrVkImageInfo imageInfo;
    if (!rt.getVkImageInfo(&imageInfo)) {
        return false;
    }

    return validate_image_info(imageInfo.fFormat, ct, config);
}

bool GrVkCaps::getConfigFromBackendFormat(const GrBackendFormat& format, SkColorType ct,
                                          GrPixelConfig* config) const {
    const VkFormat* vkFormat = format.getVkFormat();
    if (!vkFormat) {
        return false;
    }
    return validate_image_info(*vkFormat, ct, config);
}

#ifdef GR_TEST_UTILS
GrBackendFormat GrVkCaps::onCreateFormatFromBackendTexture(
        const GrBackendTexture& backendTex) const {
    GrVkImageInfo vkInfo;
    SkAssertResult(backendTex.getVkImageInfo(&vkInfo));
    return GrBackendFormat::MakeVk(vkInfo.fFormat);
}
#endif
