blob: a82775a89ffafaf08f45601feff182ed98139398 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/gpu/GrBackendSemaphore.h"
11#include "include/gpu/GrBackendSurface.h"
12#include "include/gpu/GrContextOptions.h"
13#include "include/private/SkTo.h"
14#include "src/core/SkConvertPixels.h"
15#include "src/core/SkMipMap.h"
16#include "src/gpu/GrContextPriv.h"
17#include "src/gpu/GrGeometryProcessor.h"
18#include "src/gpu/GrGpuResourceCacheAccess.h"
19#include "src/gpu/GrMesh.h"
20#include "src/gpu/GrPipeline.h"
21#include "src/gpu/GrRenderTargetPriv.h"
22#include "src/gpu/GrTexturePriv.h"
23#include "src/gpu/vk/GrVkAMDMemoryAllocator.h"
24#include "src/gpu/vk/GrVkCommandBuffer.h"
25#include "src/gpu/vk/GrVkCommandPool.h"
26#include "src/gpu/vk/GrVkGpuCommandBuffer.h"
27#include "src/gpu/vk/GrVkImage.h"
28#include "src/gpu/vk/GrVkIndexBuffer.h"
29#include "src/gpu/vk/GrVkInterface.h"
30#include "src/gpu/vk/GrVkMemory.h"
31#include "src/gpu/vk/GrVkPipeline.h"
32#include "src/gpu/vk/GrVkPipelineState.h"
33#include "src/gpu/vk/GrVkRenderPass.h"
34#include "src/gpu/vk/GrVkResourceProvider.h"
35#include "src/gpu/vk/GrVkSemaphore.h"
36#include "src/gpu/vk/GrVkTexture.h"
37#include "src/gpu/vk/GrVkTextureRenderTarget.h"
38#include "src/gpu/vk/GrVkTransferBuffer.h"
39#include "src/gpu/vk/GrVkVertexBuffer.h"
40#include "src/sksl/SkSLCompiler.h"
Greg Daniel98bffae2018-08-01 13:25:41 -040041
Mike Kleinc0bd9f92019-04-23 12:05:21 -050042#include "include/gpu/vk/GrVkExtensions.h"
43#include "include/gpu/vk/GrVkTypes.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050044
Ben Wagnerf08d1d02018-06-18 15:11:00 -040045#include <utility>
46
Forrest Reiling44f85712017-03-27 23:22:20 -070047#if !defined(SK_BUILD_FOR_WIN)
48#include <unistd.h>
49#endif // !defined(SK_BUILD_FOR_WIN)
50
Greg Danieldef55462018-08-01 13:40:14 -040051#if defined(SK_BUILD_FOR_WIN) && defined(SK_DEBUG)
Mike Kleinc0bd9f92019-04-23 12:05:21 -050052#include "include/private/SkLeanWindows.h"
Greg Danieldef55462018-08-01 13:40:14 -040053#endif
54
Greg Daniel164a9f02016-02-22 09:56:40 -050055#define VK_CALL(X) GR_VK_CALL(this->vkInterface(), X)
56#define VK_CALL_RET(RET, X) GR_VK_CALL_RET(this->vkInterface(), RET, X)
57#define VK_CALL_ERRCHECK(X) GR_VK_CALL_ERRCHECK(this->vkInterface(), X)
58
// Factory for the Vulkan GrGpu backend. Validates the client-supplied backend
// context (instance/device handles, proc getter), negotiates the instance and
// physical-device API versions against the optional fMaxAPIVersion cap, builds
// a GrVkInterface (from the client's extension list when given, otherwise from
// a locally-initialized one), and returns nullptr on any validation failure.
sk_sp<GrGpu> GrVkGpu::Make(const GrVkBackendContext& backendContext,
                           const GrContextOptions& options, GrContext* context) {
    // All four core Vulkan handles must be provided by the client.
    if (backendContext.fInstance == VK_NULL_HANDLE ||
        backendContext.fPhysicalDevice == VK_NULL_HANDLE ||
        backendContext.fDevice == VK_NULL_HANDLE ||
        backendContext.fQueue == VK_NULL_HANDLE) {
        return nullptr;
    }
    if (!backendContext.fGetProc) {
        return nullptr;
    }

    // vkEnumerateInstanceVersion only exists on Vulkan 1.1+ loaders; its
    // absence implies a 1.0 instance.
    PFN_vkEnumerateInstanceVersion localEnumerateInstanceVersion =
            reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
                    backendContext.fGetProc("vkEnumerateInstanceVersion",
                                            VK_NULL_HANDLE, VK_NULL_HANDLE));
    uint32_t instanceVersion = 0;
    if (!localEnumerateInstanceVersion) {
        instanceVersion = VK_MAKE_VERSION(1, 0, 0);
    } else {
        VkResult err = localEnumerateInstanceVersion(&instanceVersion);
        if (err) {
            SkDebugf("Failed to enumerate instance version. Err: %d\n", err);
            return nullptr;
        }
    }

    PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
            reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(
                    backendContext.fGetProc("vkGetPhysicalDeviceProperties",
                                            backendContext.fInstance,
                                            VK_NULL_HANDLE));

    if (!localGetPhysicalDeviceProperties) {
        return nullptr;
    }
    VkPhysicalDeviceProperties physDeviceProperties;
    localGetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &physDeviceProperties);
    uint32_t physDevVersion = physDeviceProperties.apiVersion;

    // The client may cap the API version we are allowed to use; clamp both the
    // instance and device versions to that cap.
    uint32_t apiVersion = backendContext.fMaxAPIVersion ? backendContext.fMaxAPIVersion
                                                        : instanceVersion;

    instanceVersion = SkTMin(instanceVersion, apiVersion);
    physDevVersion = SkTMin(physDevVersion, apiVersion);

    sk_sp<const GrVkInterface> interface;

    if (backendContext.fVkExtensions) {
        interface.reset(new GrVkInterface(backendContext.fGetProc,
                                          backendContext.fInstance,
                                          backendContext.fDevice,
                                          instanceVersion,
                                          physDevVersion,
                                          backendContext.fVkExtensions));
        if (!interface->validate(instanceVersion, physDevVersion, backendContext.fVkExtensions)) {
            return nullptr;
        }
    } else {
        GrVkExtensions extensions;
        // The only extension flag that may affect the vulkan backend is the swapchain extension. We
        // need to know if this is enabled to know if we can transition to a present layout when
        // flushing a surface.
        if (backendContext.fExtensions & kKHR_swapchain_GrVkExtensionFlag) {
            const char* swapChainExtName = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            extensions.init(backendContext.fGetProc, backendContext.fInstance,
                            backendContext.fPhysicalDevice, 0, nullptr, 1, &swapChainExtName);
        }
        interface.reset(new GrVkInterface(backendContext.fGetProc,
                                          backendContext.fInstance,
                                          backendContext.fDevice,
                                          instanceVersion,
                                          physDevVersion,
                                          &extensions));
        if (!interface->validate(instanceVersion, physDevVersion, &extensions)) {
            return nullptr;
        }
    }

    return sk_sp<GrGpu>(new GrVkGpu(context, options, backendContext, interface, instanceVersion,
                                    physDevVersion));
}
141
142////////////////////////////////////////////////////////////////////////////////
143
// Constructor. Adopts (does not own) the client's Vulkan handles, builds the
// caps object from whichever feature description the client supplied
// (features2 > features > legacy feature flags), then creates the first
// command pool/buffer and opens it for recording.
GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,
                 const GrVkBackendContext& backendContext, sk_sp<const GrVkInterface> interface,
                 uint32_t instanceVersion, uint32_t physicalDeviceVersion)
        : INHERITED(context)
        , fInterface(std::move(interface))
        , fMemoryAllocator(backendContext.fMemoryAllocator)
        , fInstance(backendContext.fInstance)
        , fPhysicalDevice(backendContext.fPhysicalDevice)
        , fDevice(backendContext.fDevice)
        , fQueue(backendContext.fQueue)
        , fQueueIndex(backendContext.fGraphicsQueueIndex)
        , fResourceProvider(this)
        , fDisconnected(false) {
    // Make() only supports contexts where the client retains ownership of the
    // instance and device.
    SkASSERT(!backendContext.fOwnsInstanceAndDevice);

    if (!fMemoryAllocator) {
        // We were not given a memory allocator at creation
        fMemoryAllocator.reset(new GrVkAMDMemoryAllocator(backendContext.fPhysicalDevice,
                                                          fDevice, fInterface));
    }

    // Owned raw pointer; released in the destructor.
    fCompiler = new SkSL::Compiler();

    if (backendContext.fDeviceFeatures2) {
        // Preferred path: client supplied a full VkPhysicalDeviceFeatures2.
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   *backendContext.fDeviceFeatures2, instanceVersion,
                                   physicalDeviceVersion,
                                   *backendContext.fVkExtensions));
    } else if (backendContext.fDeviceFeatures) {
        // Wrap the 1.0-style features struct in a features2 for GrVkCaps.
        VkPhysicalDeviceFeatures2 features2;
        features2.pNext = nullptr;
        features2.features = *backendContext.fDeviceFeatures;
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features2, instanceVersion, physicalDeviceVersion,
                                   *backendContext.fVkExtensions));
    } else {
        // Legacy path: synthesize a features struct from the coarse feature flags.
        VkPhysicalDeviceFeatures2 features;
        memset(&features, 0, sizeof(VkPhysicalDeviceFeatures2));
        features.pNext = nullptr;
        if (backendContext.fFeatures & kGeometryShader_GrVkFeatureFlag) {
            features.features.geometryShader = true;
        }
        if (backendContext.fFeatures & kDualSrcBlend_GrVkFeatureFlag) {
            features.features.dualSrcBlend = true;
        }
        if (backendContext.fFeatures & kSampleRateShading_GrVkFeatureFlag) {
            features.features.sampleRateShading = true;
        }
        GrVkExtensions extensions;
        // The only extension flag that may affect the vulkan backend is the swapchain extension. We
        // need to know if this is enabled to know if we can transition to a present layout when
        // flushing a surface.
        if (backendContext.fExtensions & kKHR_swapchain_GrVkExtensionFlag) {
            const char* swapChainExtName = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            extensions.init(backendContext.fGetProc, backendContext.fInstance,
                            backendContext.fPhysicalDevice, 0, nullptr, 1, &swapChainExtName);
        }
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features, instanceVersion, physicalDeviceVersion, extensions));
    }
    // fCaps shares ownership of the same caps object as fVkCaps.
    fCaps.reset(SkRef(fVkCaps.get()));

    VK_CALL(GetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &fPhysDevProps));
    VK_CALL(GetPhysicalDeviceMemoryProperties(backendContext.fPhysicalDevice, &fPhysDevMemProps));

    fResourceProvider.init();

    // Grab a pool/primary command buffer and start recording immediately; the
    // GPU always has an open command buffer between submits.
    fCmdPool = fResourceProvider.findOrCreateCommandPool();
    fCurrentCmdBuffer = fCmdPool->getPrimaryCommandBuffer();
    SkASSERT(fCurrentCmdBuffer);
    fCurrentCmdBuffer->begin(this);
}
216
// Orderly teardown of all GPU-side state: close the open command buffer, wait
// for the queue to drain, then release pools, semaphores, the copy manager,
// the resource provider and the memory allocator, and finally null out the
// (client-owned) Vulkan handles. Teardown order here is deliberate.
void GrVkGpu::destroyResources() {
    if (fCmdPool) {
        fCmdPool->getPrimaryCommandBuffer()->end(this);
        fCmdPool->close();
    }

    // wait for all commands to finish
    VkResult res = VK_CALL(QueueWaitIdle(fQueue));

    // On windows, sometimes calls to QueueWaitIdle return before actually signalling the fences
    // on the command buffers even though they have completed. This causes an assert to fire when
    // destroying the command buffers. Currently this only seems to happen on windows, so we add a
    // sleep to make sure the fence signals.
#ifdef SK_DEBUG
    if (this->vkCaps().mustSleepOnTearDown()) {
#if defined(SK_BUILD_FOR_WIN)
        Sleep(10); // In milliseconds
#else
        sleep(1); // In seconds
#endif
    }
#endif

#ifdef SK_DEBUG
    // VK_ERROR_DEVICE_LOST is tolerated; it is forwarded to the resource
    // provider below so it can skip fence waits.
    SkASSERT(VK_SUCCESS == res || VK_ERROR_DEVICE_LOST == res);
#endif

    if (fCmdPool) {
        fCmdPool->unref(this);
        fCmdPool = nullptr;
    }

    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();

    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();


    fCopyManager.destroyResources(this);

    // must call this just before we destroy the command pool and VkDevice
    fResourceProvider.destroyResources(VK_ERROR_DEVICE_LOST == res);

    fMemoryAllocator.reset();

    // The handles themselves are owned by the client (see the
    // fOwnsInstanceAndDevice assert in the constructor); we only drop our
    // copies here.
    fQueue = VK_NULL_HANDLE;
    fDevice = VK_NULL_HANDLE;
    fInstance = VK_NULL_HANDLE;
}
271
272GrVkGpu::~GrVkGpu() {
273 if (!fDisconnected) {
274 this->destroyResources();
275 }
276 delete fCompiler;
277}
278
279
// Detach this GrVkGpu from its context. kCleanup performs the full orderly
// teardown; any other type abandons resources without touching the (possibly
// already-lost) device. Idempotent via fDisconnected.
void GrVkGpu::disconnect(DisconnectType type) {
    INHERITED::disconnect(type);
    if (!fDisconnected) {
        if (DisconnectType::kCleanup == type) {
            this->destroyResources();
        } else {
            // Abandon path: release CPU-side bookkeeping without issuing any
            // Vulkan calls that require a live device.
            if (fCmdPool) {
                fCmdPool->unrefAndAbandon();
                fCmdPool = nullptr;
            }
            for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
                fSemaphoresToWaitOn[i]->unrefAndAbandon();
            }
            for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
                fSemaphoresToSignal[i]->unrefAndAbandon();
            }
            fCopyManager.abandonResources();

            // must call this just before we destroy the command pool and VkDevice
            fResourceProvider.abandonResources();

            fMemoryAllocator.reset();
        }
        fSemaphoresToWaitOn.reset();
        fSemaphoresToSignal.reset();
        fCurrentCmdBuffer = nullptr;
        fDisconnected = true;
    }
}
309
310///////////////////////////////////////////////////////////////////////////////
311
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400312GrGpuRTCommandBuffer* GrVkGpu::getCommandBuffer(
Ethan Nicholas56d19a52018-10-15 11:26:20 -0400313 GrRenderTarget* rt, GrSurfaceOrigin origin, const SkRect& bounds,
Greg Daniel500d58b2017-08-24 15:59:33 -0400314 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
315 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400316 if (!fCachedRTCommandBuffer) {
317 fCachedRTCommandBuffer.reset(new GrVkGpuRTCommandBuffer(this));
318 }
319
Greg Daniela41a74a2018-10-09 12:59:23 +0000320 fCachedRTCommandBuffer->set(rt, origin, colorInfo, stencilInfo);
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400321 return fCachedRTCommandBuffer.get();
Greg Daniel500d58b2017-08-24 15:59:33 -0400322}
323
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400324GrGpuTextureCommandBuffer* GrVkGpu::getCommandBuffer(GrTexture* texture, GrSurfaceOrigin origin) {
325 if (!fCachedTexCommandBuffer) {
326 fCachedTexCommandBuffer.reset(new GrVkGpuTextureCommandBuffer(this));
327 }
328
329 fCachedTexCommandBuffer->set(texture, origin);
330 return fCachedTexCommandBuffer.get();
egdaniel066df7c2016-06-08 14:02:27 -0700331}
332
// Ends and submits the current primary command buffer along with any pending
// wait/signal semaphores, optionally registering a finished-proc, then opens
// a fresh command pool/buffer for subsequent recording. An empty buffer with
// no semaphores and no forced sync is skipped entirely.
void GrVkGpu::submitCommandBuffer(SyncQueue sync, GrGpuFinishedProc finishedProc,
                                  GrGpuFinishedContext finishedContext) {
    SkASSERT(fCurrentCmdBuffer);

    // Fast path: nothing recorded and nothing to synchronize — avoid a
    // pointless queue submission.
    if (!fCurrentCmdBuffer->hasWork() && kForce_SyncQueue != sync &&
        !fSemaphoresToSignal.count() && !fSemaphoresToWaitOn.count()) {
        SkASSERT(fDrawables.empty());
        fResourceProvider.checkCommandBuffers();
        if (finishedProc) {
            fResourceProvider.addFinishedProcToActiveCommandBuffers(finishedProc, finishedContext);
        }
        return;
    }

    fCurrentCmdBuffer->end(this);
    fCmdPool->close();
    fCurrentCmdBuffer->submitToQueue(this, fQueue, sync, fSemaphoresToSignal, fSemaphoresToWaitOn);

    if (finishedProc) {
        // Make sure this is called after closing the current command pool
        fResourceProvider.addFinishedProcToActiveCommandBuffers(finishedProc, finishedContext);
    }

    // We must delete any drawables that have been waiting until submit for us to destroy.
    fDrawables.reset();

    // The submitted command buffer now owns the semaphore dependencies; drop
    // our references.
    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();
    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();

    // Release old command pool and create a new one
    fCmdPool->unref(this);
    fResourceProvider.checkCommandBuffers();
    fCmdPool = fResourceProvider.findOrCreateCommandPool();
    fCurrentCmdBuffer = fCmdPool->getPrimaryCommandBuffer();
    fCurrentCmdBuffer->begin(this);
}
375
376///////////////////////////////////////////////////////////////////////////////
Brian Salomondbf70722019-02-07 11:31:24 -0500377sk_sp<GrGpuBuffer> GrVkGpu::onCreateBuffer(size_t size, GrGpuBufferType type,
378 GrAccessPattern accessPattern, const void* data) {
379 sk_sp<GrGpuBuffer> buff;
cdalton397536c2016-03-25 12:15:03 -0700380 switch (type) {
Brian Salomonae64c192019-02-05 09:41:37 -0500381 case GrGpuBufferType::kVertex:
cdalton397536c2016-03-25 12:15:03 -0700382 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
383 kStatic_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500384 buff = GrVkVertexBuffer::Make(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700385 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500386 case GrGpuBufferType::kIndex:
cdalton397536c2016-03-25 12:15:03 -0700387 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
388 kStatic_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500389 buff = GrVkIndexBuffer::Make(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700390 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500391 case GrGpuBufferType::kXferCpuToGpu:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400392 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
393 kStream_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500394 buff = GrVkTransferBuffer::Make(this, size, GrVkBuffer::kCopyRead_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700395 break;
Brian Salomonae64c192019-02-05 09:41:37 -0500396 case GrGpuBufferType::kXferGpuToCpu:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400397 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
398 kStream_GrAccessPattern == accessPattern);
Brian Salomon12d22642019-01-29 14:38:50 -0500399 buff = GrVkTransferBuffer::Make(this, size, GrVkBuffer::kCopyWrite_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700400 break;
cdalton397536c2016-03-25 12:15:03 -0700401 default:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400402 SK_ABORT("Unknown buffer type.");
cdalton397536c2016-03-25 12:15:03 -0700403 return nullptr;
404 }
cdalton1bf3e712016-04-19 10:00:02 -0700405 if (data && buff) {
406 buff->updateData(data, size);
407 }
408 return buff;
Greg Daniel164a9f02016-02-22 09:56:40 -0500409}
410
// Uploads pixel data into a texture. Linear-tiled textures take a host write
// (single base level only, after transitioning to GENERAL layout); optimally
// tiled textures go through the staging-buffer mip upload path. Returns false
// on any unsupported input.
bool GrVkGpu::onWritePixels(GrSurface* surface, int left, int top, int width, int height,
                            GrColorType srcColorType, const GrMipLevel texels[],
                            int mipLevelCount) {
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(surface->asTexture());
    if (!vkTex) {
        return false;
    }

    // Make sure we have at least the base level
    if (!mipLevelCount || !texels[0].fPixels) {
        return false;
    }

    // Compressed uploads are handled elsewhere, never through this entry point.
    SkASSERT(!GrPixelConfigIsCompressed(vkTex->config()));
    bool success = false;
    bool linearTiling = vkTex->isLinearTiled();
    if (linearTiling) {
        if (mipLevelCount > 1) {
            SkDebugf("Can't upload mipmap data to linear tiled texture");
            return false;
        }
        if (VK_IMAGE_LAYOUT_PREINITIALIZED != vkTex->currentLayout()) {
            // Need to change the layout to general in order to perform a host write
            vkTex->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_GENERAL,
                                  VK_ACCESS_HOST_WRITE_BIT,
                                  VK_PIPELINE_STAGE_HOST_BIT,
                                  false);
            // Forced submit so the layout transition completes before the CPU
            // writes into the image memory.
            this->submitCommandBuffer(kForce_SyncQueue);
        }
        success = this->uploadTexDataLinear(vkTex, left, top, width, height, srcColorType,
                                           texels[0].fPixels, texels[0].fRowBytes);
    } else {
        SkASSERT(mipLevelCount <= vkTex->texturePriv().maxMipMapLevel() + 1);
        success = this->uploadTexDataOptimal(vkTex, left, top, width, height, srcColorType, texels,
                                             mipLevelCount);
    }

    return success;
}
451
Brian Salomone05ba5a2019-04-08 11:59:07 -0400452bool GrVkGpu::onTransferPixelsTo(GrTexture* texture, int left, int top, int width, int height,
453 GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
454 size_t bufferOffset, size_t rowBytes) {
Jim Van Verth1676cb92019-01-15 13:24:45 -0500455 // Can't transfer compressed data
456 SkASSERT(!GrPixelConfigIsCompressed(texture->config()));
457
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400458 // Vulkan only supports 4-byte aligned offsets
459 if (SkToBool(bufferOffset & 0x2)) {
460 return false;
461 }
462 GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);
463 if (!vkTex) {
464 return false;
465 }
466 GrVkTransferBuffer* vkBuffer = static_cast<GrVkTransferBuffer*>(transferBuffer);
467 if (!vkBuffer) {
468 return false;
469 }
470
Greg Daniel660cc992017-06-26 14:55:05 -0400471 SkDEBUGCODE(
472 SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
473 SkIRect bounds = SkIRect::MakeWH(texture->width(), texture->height());
474 SkASSERT(bounds.contains(subRect));
475 )
Brian Salomonc320b152018-02-20 14:05:36 -0500476 int bpp = GrColorTypeBytesPerPixel(bufferColorType);
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400477 if (rowBytes == 0) {
Brian Salomonc320b152018-02-20 14:05:36 -0500478 rowBytes = bpp * width;
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400479 }
480
481 // Set up copy region
482 VkBufferImageCopy region;
483 memset(&region, 0, sizeof(VkBufferImageCopy));
484 region.bufferOffset = bufferOffset;
485 region.bufferRowLength = (uint32_t)(rowBytes/bpp);
486 region.bufferImageHeight = 0;
487 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
488 region.imageOffset = { left, top, 0 };
489 region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
490
491 // Change layout of our target so it can be copied to
492 vkTex->setImageLayout(this,
493 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
494 VK_ACCESS_TRANSFER_WRITE_BIT,
495 VK_PIPELINE_STAGE_TRANSFER_BIT,
496 false);
497
498 // Copy the buffer to the image
499 fCurrentCmdBuffer->copyBufferToImage(this,
500 vkBuffer,
501 vkTex,
502 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
503 1,
504 &region);
505
Greg Daniel0fc4d2d2017-10-12 11:23:36 -0400506 vkTex->texturePriv().markMipMapsDirty();
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400507 return true;
508}
509
// Copies pixels from a surface into a transfer buffer via
// vkCmdCopyImageToBuffer, resolving MSAA render targets first when needed.
// Rows are written tightly packed (bufferRowLength == width). Submits the
// command buffer without waiting; the caller must synchronize before reading.
bool GrVkGpu::onTransferPixelsFrom(GrSurface* surface, int left, int top, int width, int height,
                                   GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
                                   size_t offset) {
    SkASSERT(surface);
    SkASSERT(transferBuffer);

    GrVkTransferBuffer* vkBuffer = static_cast<GrVkTransferBuffer*>(transferBuffer);

    GrVkImage* srcImage;
    if (GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(surface->asRenderTarget())) {
        // Reading from render targets that wrap a secondary command buffer is not allowed since
        // it would require us to know the VkImage, which we don't have, as well as need us to
        // stop and start the VkRenderPass which we don't have access to.
        if (rt->wrapsSecondaryCommandBuffer()) {
            return false;
        }
        // resolve the render target if necessary
        switch (rt->getResolveType()) {
            case GrVkRenderTarget::kCantResolve_ResolveType:
                return false;
            case GrVkRenderTarget::kAutoResolves_ResolveType:
                break;
            case GrVkRenderTarget::kCanResolve_ResolveType:
                this->resolveRenderTargetNoFlush(rt);
                break;
            default:
                SK_ABORT("Unknown resolve type");
        }
        srcImage = rt;
    } else {
        srcImage = static_cast<GrVkTexture*>(surface->asTexture());
    }

    // Set up copy region
    VkBufferImageCopy region;
    memset(&region, 0, sizeof(VkBufferImageCopy));
    region.bufferOffset = offset;
    region.bufferRowLength = width;
    region.bufferImageHeight = 0;
    region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    region.imageOffset = { left, top, 0 };
    region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };

    // Transition the source so it is legal to copy from.
    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    fCurrentCmdBuffer->copyImageToBuffer(this, srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                         vkBuffer, 1, &region);

    // Make sure the copy to buffer has finished.
    vkBuffer->addMemoryBarrier(this,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               VK_ACCESS_HOST_READ_BIT,
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_PIPELINE_STAGE_HOST_BIT,
                               false);

    // The caller is responsible for syncing.
    this->submitCommandBuffer(kSkip_SyncQueue);

    return true;
}
575
// Records a vkCmdResolveImage from the MSAA image of `src` into `dst`
// (render target image if it has one, otherwise its texture image),
// transitioning both images into the required transfer layouts first.
void GrVkGpu::resolveImage(GrSurface* dst, GrVkRenderTarget* src, const SkIRect& srcRect,
                           const SkIPoint& dstPoint) {
    SkASSERT(dst);
    SkASSERT(src && src->numColorSamples() > 1 && src->msaaImage());

    VkImageResolve resolveInfo;
    resolveInfo.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.srcOffset = {srcRect.fLeft, srcRect.fTop, 0};
    resolveInfo.dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.dstOffset = {dstPoint.fX, dstPoint.fY, 0};
    resolveInfo.extent = {(uint32_t)srcRect.width(), (uint32_t)srcRect.height(), 1};

    // Resolve into the render target image when dst has one, else its texture.
    GrVkImage* dstImage;
    GrRenderTarget* dstRT = dst->asRenderTarget();
    if (dstRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
        dstImage = vkRT;
    } else {
        SkASSERT(dst->asTexture());
        dstImage = static_cast<GrVkTexture*>(dst->asTexture());
    }
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    src->msaaImage()->setImageLayout(this,
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_ACCESS_TRANSFER_READ_BIT,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT,
                                     false);

    fCurrentCmdBuffer->resolveImage(this, *src->msaaImage(), *dstImage, 1, &resolveInfo);
}
611
Brian Salomon1fabd512018-02-09 09:54:25 -0500612void GrVkGpu::internalResolveRenderTarget(GrRenderTarget* target, bool requiresSubmit) {
egdaniel66933552016-08-24 07:22:19 -0700613 if (target->needsResolve()) {
614 SkASSERT(target->numColorSamples() > 1);
egdaniel52ad2512016-08-04 12:50:01 -0700615 GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(target);
616 SkASSERT(rt->msaaImage());
Greg Daniel69d49922017-02-23 09:44:02 -0500617
egdaniel4bcd62e2016-08-31 07:37:31 -0700618 const SkIRect& srcRect = rt->getResolveRect();
egdaniel52ad2512016-08-04 12:50:01 -0700619
Brian Salomon1fabd512018-02-09 09:54:25 -0500620 this->resolveImage(target, rt, srcRect, SkIPoint::Make(srcRect.fLeft, srcRect.fTop));
egdaniel52ad2512016-08-04 12:50:01 -0700621
622 rt->flagAsResolved();
Greg Daniel69d49922017-02-23 09:44:02 -0500623
624 if (requiresSubmit) {
625 this->submitCommandBuffer(kSkip_SyncQueue);
626 }
egdaniel52ad2512016-08-04 12:50:01 -0700627 }
628}
629
// Writes pixel data into a linear-tiled texture by mapping its device memory directly.
// 'rowBytes' is the source stride (0 means tightly packed). Returns false if the memory
// cannot be mapped.
bool GrVkGpu::uploadTexDataLinear(GrVkTexture* tex, int left, int top, int width, int height,
                                  GrColorType dataColorType, const void* data, size_t rowBytes) {
    SkASSERT(data);
    SkASSERT(tex->isLinearTiled());

    // If we're uploading compressed data then we should be using uploadCompressedTexData
    SkASSERT(!GrPixelConfigIsCompressed(GrColorTypeToPixelConfig(dataColorType,
                                                                 GrSRGBEncoded::kNo)));

    // Debug-only: the destination rect must lie within the texture bounds.
    SkDEBUGCODE(
        SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
        SkIRect bounds = SkIRect::MakeWH(tex->width(), tex->height());
        SkASSERT(bounds.contains(subRect));
    )
    int bpp = GrColorTypeBytesPerPixel(dataColorType);
    size_t trimRowBytes = width * bpp;
    if (!rowBytes) {
        // Zero stride means the caller's rows are tightly packed.
        rowBytes = trimRowBytes;
    }

    // Linear-tiled images are kept in a host-writable layout; anything else would require a
    // layout transition before the CPU could write the pixels.
    SkASSERT(VK_IMAGE_LAYOUT_PREINITIALIZED == tex->currentLayout() ||
             VK_IMAGE_LAYOUT_GENERAL == tex->currentLayout());
    const VkImageSubresource subres = {
        VK_IMAGE_ASPECT_COLOR_BIT,
        0,  // mipLevel
        0,  // arraySlice
    };
    VkSubresourceLayout layout;

    const GrVkInterface* interface = this->vkInterface();

    // Ask the driver for the row pitch (and offset) of the linear image's level 0.
    GR_VK_CALL(interface, GetImageSubresourceLayout(fDevice,
                                                    tex->image(),
                                                    &subres,
                                                    &layout));

    const GrVkAlloc& alloc = tex->alloc();
    // Byte offset of the destination rect's top-left corner within the mapped memory.
    VkDeviceSize offset = top * layout.rowPitch + left * bpp;
    VkDeviceSize size = height*layout.rowPitch;
    SkASSERT(size + offset <= alloc.fSize);
    void* mapPtr = GrVkMemory::MapAlloc(this, alloc);
    if (!mapPtr) {
        return false;
    }
    mapPtr = reinterpret_cast<char*>(mapPtr) + offset;

    // Row-by-row copy honoring both the image's rowPitch and the source stride.
    SkRectMemcpy(mapPtr, static_cast<size_t>(layout.rowPitch), data, rowBytes, trimRowBytes,
                 height);

    // Flush in case the allocation is non-coherent, then release the mapping.
    GrVkMemory::FlushMappedAlloc(this, alloc, offset, size);
    GrVkMemory::UnmapAlloc(this, alloc);

    return true;
}
684
// Uploads pixel data to an optimally-tiled texture: the mip levels are packed into a single
// staging (transfer) buffer and copied to the image with one vkCmdCopyBufferToImage. For
// RGB_888x data destined for a VK_FORMAT_R8G8B8_UNORM image, the data is first uploaded to a
// temporary RGBA8 texture and then copied across (blit/draw), since the 3-byte format cannot
// be written directly.
bool GrVkGpu::uploadTexDataOptimal(GrVkTexture* tex, int left, int top, int width, int height,
                                   GrColorType dataColorType, const GrMipLevel texels[],
                                   int mipLevelCount) {
    SkASSERT(!tex->isLinearTiled());
    // The assumption is either that we have no mipmaps, or that our rect is the entire texture
    SkASSERT(1 == mipLevelCount ||
             (0 == left && 0 == top && width == tex->width() && height == tex->height()));

    // We assume that if the texture has mip levels, we either upload to all the levels or just the
    // first.
    SkASSERT(1 == mipLevelCount || mipLevelCount == (tex->texturePriv().maxMipMapLevel() + 1));

    // If we're uploading compressed data then we should be using uploadCompressedTexData
    SkASSERT(!GrPixelConfigIsCompressed(GrColorTypeToPixelConfig(dataColorType,
                                                                 GrSRGBEncoded::kNo)));

    if (width == 0 || height == 0) {
        return false;
    }

    // The source color type must match the texture's config exactly.
    if (GrPixelConfigToColorType(tex->config()) != dataColorType) {
        return false;
    }

    // For RGB_888x src data we are uploading it first to an RGBA texture and then copying it to the
    // dst RGB texture. Thus we do not upload mip levels for that.
    if (dataColorType == GrColorType::kRGB_888x && tex->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
        SkASSERT(tex->config() == kRGB_888_GrPixelConfig);
        // First check that we'll be able to do the copy to the to the R8G8B8 image in the end via a
        // blit or draw.
        if (!this->vkCaps().configCanBeDstofBlit(kRGB_888_GrPixelConfig, tex->isLinearTiled()) &&
            !this->vkCaps().maxRenderTargetSampleCount(kRGB_888_GrPixelConfig)) {
            return false;
        }
        mipLevelCount = 1;
    }

    SkASSERT(this->caps()->isConfigTexturable(tex->config()));
    int bpp = GrColorTypeBytesPerPixel(dataColorType);

    // texels is const.
    // But we may need to adjust the fPixels ptr based on the copyRect, or fRowBytes.
    // Because of this we need to make a non-const shallow copy of texels.
    SkAutoTMalloc<GrMipLevel> texelsShallowCopy;

    texelsShallowCopy.reset(mipLevelCount);
    memcpy(texelsShallowCopy.get(), texels, mipLevelCount*sizeof(GrMipLevel));

    // Compute the packed offset of each mip level inside the staging buffer. A level with no
    // pixel data gets offset 0 and contributes nothing to the buffer size.
    SkTArray<size_t> individualMipOffsets(mipLevelCount);
    individualMipOffsets.push_back(0);
    size_t combinedBufferSize = width * bpp * height;
    int currentWidth = width;
    int currentHeight = height;
    if (!texelsShallowCopy[0].fPixels) {
        combinedBufferSize = 0;
    }

    // The alignment must be at least 4 bytes and a multiple of the bytes per pixel of the image
    // config. This works with the assumption that the bytes in pixel config is always a power of 2.
    SkASSERT((bpp & (bpp - 1)) == 0);
    const size_t alignmentMask = 0x3 | (bpp - 1);
    for (int currentMipLevel = 1; currentMipLevel < mipLevelCount; currentMipLevel++) {
        currentWidth = SkTMax(1, currentWidth/2);
        currentHeight = SkTMax(1, currentHeight/2);

        if (texelsShallowCopy[currentMipLevel].fPixels) {
            const size_t trimmedSize = currentWidth * bpp * currentHeight;
            const size_t alignmentDiff = combinedBufferSize & alignmentMask;
            if (alignmentDiff != 0) {
                // Round the running offset up to the next aligned boundary.
                combinedBufferSize += alignmentMask - alignmentDiff + 1;
            }
            individualMipOffsets.push_back(combinedBufferSize);
            combinedBufferSize += trimmedSize;
        } else {
            individualMipOffsets.push_back(0);
        }
    }
    if (0 == combinedBufferSize) {
        // We don't actually have any data to upload so just return success
        return true;
    }

    // allocate buffer to hold our mip data
    sk_sp<GrVkTransferBuffer> transferBuffer =
            GrVkTransferBuffer::Make(this, combinedBufferSize, GrVkBuffer::kCopyRead_Type);
    if (!transferBuffer) {
        return false;
    }

    int uploadLeft = left;
    int uploadTop = top;
    GrVkTexture* uploadTexture = tex;
    // For uploading RGB_888x data to an R8G8B8_UNORM texture we must first upload the data to an
    // R8G8B8A8_UNORM image and then copy it.
    sk_sp<GrVkTexture> copyTexture;
    if (dataColorType == GrColorType::kRGB_888x && tex->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
        // The temporary must be renderable so the final RGBA->RGB copy can fall back to a draw.
        GrSurfaceDesc surfDesc;
        surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
        surfDesc.fWidth = width;
        surfDesc.fHeight = height;
        surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
        surfDesc.fSampleCnt = 1;

        VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_DST_BIT;

        GrVkImage::ImageDesc imageDesc;
        imageDesc.fImageType = VK_IMAGE_TYPE_2D;
        imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
        imageDesc.fWidth = width;
        imageDesc.fHeight = height;
        imageDesc.fLevels = 1;
        imageDesc.fSamples = 1;
        imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
        imageDesc.fUsageFlags = usageFlags;
        imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

        copyTexture = GrVkTexture::MakeNewTexture(this, SkBudgeted::kYes, surfDesc, imageDesc,
                                                  GrMipMapsStatus::kNotAllocated);
        if (!copyTexture) {
            return false;
        }
        uploadTexture = copyTexture.get();
        // The temporary spans exactly the upload rect, so the upload origin becomes (0, 0).
        uploadLeft = 0;
        uploadTop = 0;
    }

    // Fill the staging buffer and build one VkBufferImageCopy region per present mip level.
    char* buffer = (char*) transferBuffer->map();
    SkTArray<VkBufferImageCopy> regions(mipLevelCount);

    currentWidth = width;
    currentHeight = height;
    int layerHeight = uploadTexture->height();
    for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
        if (texelsShallowCopy[currentMipLevel].fPixels) {
            SkASSERT(1 == mipLevelCount || currentHeight == layerHeight);
            const size_t trimRowBytes = currentWidth * bpp;
            const size_t rowBytes = texelsShallowCopy[currentMipLevel].fRowBytes
                                    ? texelsShallowCopy[currentMipLevel].fRowBytes
                                    : trimRowBytes;

            // copy data into the buffer, skipping the trailing bytes
            char* dst = buffer + individualMipOffsets[currentMipLevel];
            const char* src = (const char*)texelsShallowCopy[currentMipLevel].fPixels;
            SkRectMemcpy(dst, trimRowBytes, src, rowBytes, trimRowBytes, currentHeight);

            VkBufferImageCopy& region = regions.push_back();
            memset(&region, 0, sizeof(VkBufferImageCopy));
            region.bufferOffset = transferBuffer->offset() + individualMipOffsets[currentMipLevel];
            region.bufferRowLength = currentWidth;
            region.bufferImageHeight = currentHeight;
            region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, SkToU32(currentMipLevel), 0, 1 };
            region.imageOffset = {uploadLeft, uploadTop, 0};
            region.imageExtent = { (uint32_t)currentWidth, (uint32_t)currentHeight, 1 };
        }
        currentWidth = SkTMax(1, currentWidth/2);
        currentHeight = SkTMax(1, currentHeight/2);
        layerHeight = currentHeight;
    }

    // no need to flush non-coherent memory, unmap will do that for us
    transferBuffer->unmap();

    // Change layout of our target so it can be copied to
    uploadTexture->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  VK_ACCESS_TRANSFER_WRITE_BIT,
                                  VK_PIPELINE_STAGE_TRANSFER_BIT,
                                  false);

    // Copy the buffer to the image
    fCurrentCmdBuffer->copyBufferToImage(this,
                                         transferBuffer.get(),
                                         uploadTexture,
                                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                         regions.count(),
                                         regions.begin());

    // If we copied the data into a temporary image first, copy that image into our main texture
    // now.
    if (copyTexture.get()) {
        SkASSERT(dataColorType == GrColorType::kRGB_888x);
        static const GrSurfaceOrigin kOrigin = kTopLeft_GrSurfaceOrigin;
        SkAssertResult(this->copySurface(tex, kOrigin, copyTexture.get(), kOrigin,
                                         SkIRect::MakeWH(width, height), SkIPoint::Make(left, top),
                                         false));
    }
    if (1 == mipLevelCount) {
        // Only the base level was written, so any other levels are now stale.
        tex->texturePriv().markMipMapsDirty();
    }

    return true;
}
879
// It's probably possible to roll this into uploadTexDataOptimal,
// but for now it's easier to maintain as a separate entity.
//
// Uploads compressed pixel data (whole texture only) via a staging buffer and a
// buffer->image copy. Unlike uploadTexDataOptimal, every requested mip level must be
// supplied: compressed textures are treated as read-only, so partial uploads fail.
bool GrVkGpu::uploadTexDataCompressed(GrVkTexture* tex, int left, int top, int width, int height,
                                      GrColorType dataColorType, const GrMipLevel texels[],
                                      int mipLevelCount) {
    SkASSERT(!tex->isLinearTiled());
    // For now the assumption is that our rect is the entire texture.
    // Compressed textures are read-only so this should be a reasonable assumption.
    SkASSERT(0 == left && 0 == top && width == tex->width() && height == tex->height());

    // We assume that if the texture has mip levels, we either upload to all the levels or just the
    // first.
    SkASSERT(1 == mipLevelCount || mipLevelCount == (tex->texturePriv().maxMipMapLevel() + 1));

    SkASSERT(GrPixelConfigIsCompressed(GrColorTypeToPixelConfig(dataColorType,
                                                                GrSRGBEncoded::kNo)));

    if (width == 0 || height == 0) {
        return false;
    }

    // The source color type must match the texture's config exactly.
    if (GrPixelConfigToColorType(tex->config()) != dataColorType) {
        return false;
    }

    SkASSERT(this->caps()->isConfigTexturable(tex->config()));

    // Compute each level's packed offset within the staging buffer; sizes come from the
    // compressed format, not width * bpp.
    SkTArray<size_t> individualMipOffsets(mipLevelCount);
    individualMipOffsets.push_back(0);
    size_t combinedBufferSize = GrCompressedFormatDataSize(tex->config(), width, height);
    int currentWidth = width;
    int currentHeight = height;
    if (!texels[0].fPixels) {
        // The base level is mandatory for a compressed upload.
        return false;
    }

    // We assume that the alignment for any compressed format is at least 4 bytes and so we don't
    // need to worry about alignment issues. For example, each block in ETC1 is 8 bytes.
    for (int currentMipLevel = 1; currentMipLevel < mipLevelCount; currentMipLevel++) {
        currentWidth = SkTMax(1, currentWidth / 2);
        currentHeight = SkTMax(1, currentHeight / 2);

        if (texels[currentMipLevel].fPixels) {
            const size_t dataSize = GrCompressedFormatDataSize(tex->config(), currentWidth,
                                                               currentHeight);
            individualMipOffsets.push_back(combinedBufferSize);
            combinedBufferSize += dataSize;
        } else {
            // A missing level is an error here (no partial mip chains for compressed data).
            return false;
        }
    }
    if (0 == combinedBufferSize) {
        // We don't have any data to upload so fail (compressed textures are read-only).
        return false;
    }

    // allocate buffer to hold our mip data
    sk_sp<GrVkTransferBuffer> transferBuffer =
            GrVkTransferBuffer::Make(this, combinedBufferSize, GrVkBuffer::kCopyRead_Type);
    if (!transferBuffer) {
        return false;
    }

    int uploadLeft = left;
    int uploadTop = top;
    GrVkTexture* uploadTexture = tex;

    // Fill the staging buffer and build one VkBufferImageCopy region per mip level.
    char* buffer = (char*)transferBuffer->map();
    SkTArray<VkBufferImageCopy> regions(mipLevelCount);

    currentWidth = width;
    currentHeight = height;
    int layerHeight = uploadTexture->height();
    for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
        if (texels[currentMipLevel].fPixels) {
            // Again, we're assuming that our rect is the entire texture
            SkASSERT(currentHeight == layerHeight);
            SkASSERT(0 == uploadLeft && 0 == uploadTop);

            const size_t dataSize = GrCompressedFormatDataSize(tex->config(), currentWidth,
                                                               currentHeight);

            // copy data into the buffer, skipping the trailing bytes
            char* dst = buffer + individualMipOffsets[currentMipLevel];
            const char* src = (const char*)texels[currentMipLevel].fPixels;
            memcpy(dst, src, dataSize);

            VkBufferImageCopy& region = regions.push_back();
            memset(&region, 0, sizeof(VkBufferImageCopy));
            region.bufferOffset = transferBuffer->offset() + individualMipOffsets[currentMipLevel];
            region.bufferRowLength = currentWidth;
            region.bufferImageHeight = currentHeight;
            region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, SkToU32(currentMipLevel), 0, 1 };
            region.imageOffset = { uploadLeft, uploadTop, 0 };
            region.imageExtent = { (uint32_t)currentWidth, (uint32_t)currentHeight, 1 };
        }
        currentWidth = SkTMax(1, currentWidth / 2);
        currentHeight = SkTMax(1, currentHeight / 2);
        layerHeight = currentHeight;
    }

    // no need to flush non-coherent memory, unmap will do that for us
    transferBuffer->unmap();

    // Change layout of our target so it can be copied to
    uploadTexture->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  VK_ACCESS_TRANSFER_WRITE_BIT,
                                  VK_PIPELINE_STAGE_TRANSFER_BIT,
                                  false);

    // Copy the buffer to the image
    fCurrentCmdBuffer->copyBufferToImage(this,
                                         transferBuffer.get(),
                                         uploadTexture,
                                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                         regions.count(),
                                         regions.begin());

    if (1 == mipLevelCount) {
        // Only the base level was written, so any other levels are now stale.
        tex->texturePriv().markMipMapsDirty();
    }

    return true;
}
1005
Greg Daniel164a9f02016-02-22 09:56:40 -05001006////////////////////////////////////////////////////////////////////////////////
// Creates a new (optionally renderable) Vulkan texture, uploads any provided mip data, and
// optionally clears it. Returns nullptr on failure.
sk_sp<GrTexture> GrVkGpu::onCreateTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted,
                                          const GrMipLevel texels[], int mipLevelCount) {
    bool renderTarget = SkToBool(desc.fFlags & kRenderTarget_GrSurfaceFlag);

    VkFormat pixelFormat;
    SkAssertResult(GrPixelConfigToVkFormat(desc.fConfig, &pixelFormat));

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (renderTarget) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    // For now we will set the VK_IMAGE_USAGE_TRANSFER_DESTINATION_BIT and
    // VK_IMAGE_USAGE_TRANSFER_SOURCE_BIT on every texture since we do not know whether or not we
    // will be using this texture in some copy or not. Also this assumes, as is the current case,
    // that all render targets in vulkan are also textures. If we change this practice of setting
    // both bits, we must make sure to set the destination bit if we are uploading srcData to the
    // texture.
    usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // This ImageDesc refers to the texture that will be read by the client. Thus even if msaa is
    // requested, this ImageDesc describes the resolved texture. Therefore we always have samples set
    // to 1.
    int mipLevels = !mipLevelCount ? 1 : mipLevelCount;
    GrVkImage::ImageDesc imageDesc;
    imageDesc.fImageType = VK_IMAGE_TYPE_2D;
    imageDesc.fFormat = pixelFormat;
    imageDesc.fWidth = desc.fWidth;
    imageDesc.fHeight = desc.fHeight;
    imageDesc.fLevels = mipLevels;
    imageDesc.fSamples = 1;
    imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    imageDesc.fUsageFlags = usageFlags;
    imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

    // Mips are valid only if data is supplied for every level; a partially supplied chain
    // starts out dirty.
    GrMipMapsStatus mipMapsStatus = GrMipMapsStatus::kNotAllocated;
    if (mipLevels > 1) {
        mipMapsStatus = GrMipMapsStatus::kValid;
        for (int i = 0; i < mipLevels; ++i) {
            if (!texels[i].fPixels) {
                mipMapsStatus = GrMipMapsStatus::kDirty;
                break;
            }
        }
    }

    sk_sp<GrVkTexture> tex;
    if (renderTarget) {
        tex = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(this, budgeted, desc,
                                                                  imageDesc,
                                                                  mipMapsStatus);
    } else {
        tex = GrVkTexture::MakeNewTexture(this, budgeted, desc, imageDesc, mipMapsStatus);
    }

    if (!tex) {
        return nullptr;
    }

    bool isCompressed = GrPixelConfigIsCompressed(desc.fConfig);
    auto colorType = GrPixelConfigToColorType(desc.fConfig);
    if (mipLevelCount) {
        // Route the upload through the compressed or uncompressed path as appropriate.
        bool success;
        if (isCompressed) {
            success = this->uploadTexDataCompressed(tex.get(), 0, 0, desc.fWidth, desc.fHeight,
                                                    colorType, texels, mipLevelCount);
        } else {
            success = this->uploadTexDataOptimal(tex.get(), 0, 0, desc.fWidth, desc.fHeight,
                                                 colorType, texels, mipLevelCount);
        }
        if (!success) {
            tex->unref();
            return nullptr;
        }
    }

    // Compressed images cannot be cleared with vkCmdClearColorImage, so skip them here.
    if (SkToBool(desc.fFlags & kPerformInitialClear_GrSurfaceFlag) && !isCompressed) {
        VkClearColorValue zeroClearColor;
        memset(&zeroClearColor, 0, sizeof(zeroClearColor));
        VkImageSubresourceRange range;
        range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        range.baseArrayLayer = 0;
        range.baseMipLevel = 0;
        range.layerCount = 1;
        range.levelCount = 1;
        tex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                            VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, false);
        this->currentCommandBuffer()->clearColorImage(this, tex.get(), &zeroClearColor, 1, &range);
    }
    return std::move(tex);
}
1098
1099////////////////////////////////////////////////////////////////////////////////
1100
Greg Daniel6888c0d2017-08-25 11:55:50 -04001101void GrVkGpu::copyBuffer(GrVkBuffer* srcBuffer, GrVkBuffer* dstBuffer, VkDeviceSize srcOffset,
1102 VkDeviceSize dstOffset, VkDeviceSize size) {
1103 VkBufferCopy copyRegion;
1104 copyRegion.srcOffset = srcOffset;
1105 copyRegion.dstOffset = dstOffset;
1106 copyRegion.size = size;
1107 fCurrentCmdBuffer->copyBuffer(this, srcBuffer, dstBuffer, 1, &copyRegion);
1108}
1109
jvanverthdb379092016-07-07 11:18:46 -07001110bool GrVkGpu::updateBuffer(GrVkBuffer* buffer, const void* src,
1111 VkDeviceSize offset, VkDeviceSize size) {
jvanvertha584de92016-06-30 09:10:52 -07001112 // Update the buffer
jvanverthdb379092016-07-07 11:18:46 -07001113 fCurrentCmdBuffer->updateBuffer(this, buffer, offset, size, src);
jvanvertha584de92016-06-30 09:10:52 -07001114
1115 return true;
1116}
1117
1118////////////////////////////////////////////////////////////////////////////////
1119
Greg Daniel7e000222018-12-03 10:08:21 -05001120static bool check_image_info(const GrVkCaps& caps,
1121 const GrVkImageInfo& info,
Greg Danielcb324152019-02-25 11:36:53 -05001122 GrPixelConfig config,
1123 bool isWrappedRT) {
1124 if (VK_NULL_HANDLE == info.fImage) {
1125 return false;
1126 }
1127
1128 if (VK_NULL_HANDLE == info.fAlloc.fMemory && !isWrappedRT) {
Brian Salomond17f6582017-07-19 18:28:58 -04001129 return false;
Greg Daniel164a9f02016-02-22 09:56:40 -05001130 }
1131
Greg Daniel7e000222018-12-03 10:08:21 -05001132 if (info.fYcbcrConversionInfo.isValid()) {
1133 if (!caps.supportsYcbcrConversion() || info.fFormat != VK_NULL_HANDLE) {
1134 return false;
1135 }
jvanverthfd359ca2016-03-18 11:57:24 -07001136 }
Greg Daniel7ef28f32017-04-20 16:41:55 +00001137
Greg Danielcb324152019-02-25 11:36:53 -05001138 if (info.fImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && !caps.supportsSwapchain()) {
1139 return false;
1140 }
1141
Greg Daniel52e16d92018-04-10 09:34:07 -04001142 SkASSERT(GrVkFormatPixelConfigPairIsValid(info.fFormat, config));
Brian Salomond17f6582017-07-19 18:28:58 -04001143 return true;
1144}
1145
1146sk_sp<GrTexture> GrVkGpu::onWrapBackendTexture(const GrBackendTexture& backendTex,
Brian Salomonfa2ebea2019-01-24 15:58:58 -05001147 GrWrapOwnership ownership, GrWrapCacheable cacheable,
1148 GrIOType ioType) {
Greg Daniel7e000222018-12-03 10:08:21 -05001149 GrVkImageInfo imageInfo;
1150 if (!backendTex.getVkImageInfo(&imageInfo)) {
1151 return nullptr;
1152 }
1153
Greg Danielcb324152019-02-25 11:36:53 -05001154 if (!check_image_info(this->vkCaps(), imageInfo, backendTex.config(), false)) {
Brian Salomond17f6582017-07-19 18:28:58 -04001155 return nullptr;
1156 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001157
Greg Daniel164a9f02016-02-22 09:56:40 -05001158 GrSurfaceDesc surfDesc;
Brian Salomond17f6582017-07-19 18:28:58 -04001159 surfDesc.fFlags = kNone_GrSurfaceFlags;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001160 surfDesc.fWidth = backendTex.width();
1161 surfDesc.fHeight = backendTex.height();
1162 surfDesc.fConfig = backendTex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001163 surfDesc.fSampleCnt = 1;
Greg Daniel164a9f02016-02-22 09:56:40 -05001164
Greg Daniel52e16d92018-04-10 09:34:07 -04001165 sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
1166 SkASSERT(layout);
Brian Salomonfa2ebea2019-01-24 15:58:58 -05001167 return GrVkTexture::MakeWrappedTexture(this, surfDesc, ownership, cacheable, ioType, imageInfo,
1168 std::move(layout));
Brian Salomond17f6582017-07-19 18:28:58 -04001169}
1170
1171sk_sp<GrTexture> GrVkGpu::onWrapRenderableBackendTexture(const GrBackendTexture& backendTex,
Brian Salomond17f6582017-07-19 18:28:58 -04001172 int sampleCnt,
Brian Salomonaa6ca0a2019-01-24 16:03:07 -05001173 GrWrapOwnership ownership,
1174 GrWrapCacheable cacheable) {
Greg Daniel7e000222018-12-03 10:08:21 -05001175 GrVkImageInfo imageInfo;
1176 if (!backendTex.getVkImageInfo(&imageInfo)) {
1177 return nullptr;
1178 }
1179
Greg Danielcb324152019-02-25 11:36:53 -05001180 if (!check_image_info(this->vkCaps(), imageInfo, backendTex.config(), false)) {
Brian Salomond17f6582017-07-19 18:28:58 -04001181 return nullptr;
Greg Daniel164a9f02016-02-22 09:56:40 -05001182 }
Brian Salomond17f6582017-07-19 18:28:58 -04001183
1184 GrSurfaceDesc surfDesc;
1185 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
1186 surfDesc.fWidth = backendTex.width();
1187 surfDesc.fHeight = backendTex.height();
1188 surfDesc.fConfig = backendTex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001189 surfDesc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, backendTex.config());
Brian Salomond17f6582017-07-19 18:28:58 -04001190
Greg Daniel52e16d92018-04-10 09:34:07 -04001191 sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
1192 SkASSERT(layout);
1193
Brian Salomonaa6ca0a2019-01-24 16:03:07 -05001194 return GrVkTextureRenderTarget::MakeWrappedTextureRenderTarget(
1195 this, surfDesc, ownership, cacheable, imageInfo, std::move(layout));
Greg Daniel164a9f02016-02-22 09:56:40 -05001196}
1197
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001198sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& backendRT){
Greg Daniele79b4732017-04-20 14:07:46 -04001199 // Currently the Vulkan backend does not support wrapping of msaa render targets directly. In
1200 // general this is not an issue since swapchain images in vulkan are never multisampled. Thus if
1201 // you want a multisampled RT it is best to wrap the swapchain images and then let Skia handle
1202 // creating and owning the MSAA images.
Brian Salomonbdecacf2018-02-02 20:32:49 -05001203 if (backendRT.sampleCnt() > 1) {
Greg Daniele79b4732017-04-20 14:07:46 -04001204 return nullptr;
1205 }
halcanary9d524f22016-03-29 09:03:52 -07001206
Greg Daniel323fbcf2018-04-10 13:46:30 -04001207 GrVkImageInfo info;
1208 if (!backendRT.getVkImageInfo(&info)) {
Greg Danielbcf612b2017-05-01 13:50:58 +00001209 return nullptr;
1210 }
Greg Daniel323fbcf2018-04-10 13:46:30 -04001211
Greg Danielcb324152019-02-25 11:36:53 -05001212 if (!check_image_info(this->vkCaps(), info, backendRT.config(), true)) {
jvanverthfd359ca2016-03-18 11:57:24 -07001213 return nullptr;
1214 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001215
Greg Daniel164a9f02016-02-22 09:56:40 -05001216 GrSurfaceDesc desc;
Brian Salomon0ec981b2017-05-15 13:48:50 -04001217 desc.fFlags = kRenderTarget_GrSurfaceFlag;
Robert Phillips16d8ec62017-07-27 16:16:25 -04001218 desc.fWidth = backendRT.width();
1219 desc.fHeight = backendRT.height();
1220 desc.fConfig = backendRT.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001221 desc.fSampleCnt = 1;
Greg Daniel164a9f02016-02-22 09:56:40 -05001222
Greg Daniel323fbcf2018-04-10 13:46:30 -04001223 sk_sp<GrVkImageLayout> layout = backendRT.getGrVkImageLayout();
Greg Daniel52e16d92018-04-10 09:34:07 -04001224
Greg Daniel323fbcf2018-04-10 13:46:30 -04001225 sk_sp<GrVkRenderTarget> tgt = GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, info,
Greg Daniel52e16d92018-04-10 09:34:07 -04001226 std::move(layout));
Brian Salomonafdc6b12018-03-09 12:02:32 -05001227
1228 // We don't allow the client to supply a premade stencil buffer. We always create one if needed.
1229 SkASSERT(!backendRT.stencilBits());
1230 if (tgt) {
1231 SkASSERT(tgt->canAttemptStencilAttachment());
Greg Daniel164a9f02016-02-22 09:56:40 -05001232 }
Brian Salomonafdc6b12018-03-09 12:02:32 -05001233
Ben Wagnerff134f22018-04-24 16:29:16 -04001234 return std::move(tgt);
Greg Daniel164a9f02016-02-22 09:56:40 -05001235}
1236
Greg Daniel7ef28f32017-04-20 16:41:55 +00001237sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
Greg Daniel7ef28f32017-04-20 16:41:55 +00001238 int sampleCnt) {
Brian Osman33910292017-04-18 14:38:53 -04001239
Greg Daniel52e16d92018-04-10 09:34:07 -04001240 GrVkImageInfo imageInfo;
1241 if (!tex.getVkImageInfo(&imageInfo)) {
Greg Danielbcf612b2017-05-01 13:50:58 +00001242 return nullptr;
1243 }
Greg Danielcb324152019-02-25 11:36:53 -05001244 if (!check_image_info(this->vkCaps(), imageInfo, tex.config(), false)) {
Brian Osman33910292017-04-18 14:38:53 -04001245 return nullptr;
1246 }
1247
Greg Danielcb324152019-02-25 11:36:53 -05001248
Brian Osman33910292017-04-18 14:38:53 -04001249 GrSurfaceDesc desc;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001250 desc.fFlags = kRenderTarget_GrSurfaceFlag;
Greg Daniel7ef28f32017-04-20 16:41:55 +00001251 desc.fWidth = tex.width();
1252 desc.fHeight = tex.height();
Robert Phillips16d8ec62017-07-27 16:16:25 -04001253 desc.fConfig = tex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -05001254 desc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, tex.config());
1255 if (!desc.fSampleCnt) {
1256 return nullptr;
1257 }
Brian Osman33910292017-04-18 14:38:53 -04001258
Greg Daniel52e16d92018-04-10 09:34:07 -04001259 sk_sp<GrVkImageLayout> layout = tex.getGrVkImageLayout();
1260 SkASSERT(layout);
1261
Ben Wagnerff134f22018-04-24 16:29:16 -04001262 return GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, imageInfo, std::move(layout));
Brian Osman33910292017-04-18 14:38:53 -04001263}
1264
Greg Danielb46add82019-01-02 14:51:29 -05001265sk_sp<GrRenderTarget> GrVkGpu::onWrapVulkanSecondaryCBAsRenderTarget(
1266 const SkImageInfo& imageInfo, const GrVkDrawableInfo& vkInfo) {
1267 int maxSize = this->caps()->maxTextureSize();
1268 if (imageInfo.width() > maxSize || imageInfo.height() > maxSize) {
1269 return nullptr;
1270 }
1271
1272 GrBackendFormat backendFormat = GrBackendFormat::MakeVk(vkInfo.fFormat);
1273 if (!backendFormat.isValid()) {
1274 return nullptr;
1275 }
1276 GrPixelConfig config = this->caps()->getConfigFromBackendFormat(backendFormat,
1277 imageInfo.colorType());
1278 if (config == kUnknown_GrPixelConfig) {
1279 return nullptr;
1280 }
1281
1282 GrSurfaceDesc desc;
1283 desc.fFlags = kRenderTarget_GrSurfaceFlag;
1284 desc.fWidth = imageInfo.width();
1285 desc.fHeight = imageInfo.height();
1286 desc.fConfig = config;
1287 desc.fSampleCnt = this->caps()->getRenderTargetSampleCount(1, config);
1288 if (!desc.fSampleCnt) {
1289 return nullptr;
1290 }
1291
1292 return GrVkRenderTarget::MakeSecondaryCBRenderTarget(this, desc, vkInfo);
1293}
1294
// Regenerates the upper mip levels of |tex| entirely on the GPU: each level N-1 is blitted
// down into level N with linear filtering, one level at a time. Returns false when the
// texture is linearly tiled (cannot be mipped) or its format cannot be used as both the
// source and destination of a vkCmdBlitImage.
bool GrVkGpu::onRegenerateMipMapLevels(GrTexture* tex) {
    auto* vkTex = static_cast<GrVkTexture*>(tex);
    // don't do anything for linearly tiled textures (can't have mipmaps)
    if (vkTex->isLinearTiled()) {
        SkDebugf("Trying to create mipmap for linear tiled texture");
        return false;
    }

    // determine if we can blit to and from this format
    const GrVkCaps& caps = this->vkCaps();
    if (!caps.configCanBeDstofBlit(tex->config(), false) ||
        !caps.configCanBeSrcofBlit(tex->config(), false) ||
        !caps.mipMapSupport()) {
        return false;
    }

    int width = tex->width();
    int height = tex->height();
    VkImageBlit blitRegion;
    memset(&blitRegion, 0, sizeof(VkImageBlit));

    // SkMipMap doesn't include the base level in the level count so we have to add 1
    uint32_t levelCount = SkMipMap::ComputeLevelCount(tex->width(), tex->height()) + 1;
    SkASSERT(levelCount == vkTex->mipLevels());

    // change layout of the layers so we can write to them.
    vkTex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_ACCESS_TRANSFER_WRITE_BIT,
                          VK_PIPELINE_STAGE_TRANSFER_BIT, false);

    // setup memory barrier
    // Reused below for every per-level DST->SRC transition; only baseMipLevel changes per use.
    SkASSERT(GrVkFormatIsSupported(vkTex->imageFormat()));
    VkImageAspectFlags aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
    VkImageMemoryBarrier imageMemoryBarrier = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,  // sType
            nullptr,                                 // pNext
            VK_ACCESS_TRANSFER_WRITE_BIT,            // srcAccessMask
            VK_ACCESS_TRANSFER_READ_BIT,             // dstAccessMask
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,    // oldLayout
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,    // newLayout
            VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
            vkTex->image(),                          // image
            {aspectFlags, 0, 1, 0, 1}                // subresourceRange
    };

    // Blit the miplevels
    uint32_t mipLevel = 1;
    while (mipLevel < levelCount) {
        int prevWidth = width;
        int prevHeight = height;
        width = SkTMax(1, width / 2);
        height = SkTMax(1, height / 2);

        // Transition the just-written level (mipLevel - 1) to TRANSFER_SRC so the next
        // blit can read from it; all other levels stay in TRANSFER_DST.
        imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
        this->addImageMemoryBarrier(vkTex->resource(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_TRANSFER_BIT, false, &imageMemoryBarrier);

        // Downsample level (mipLevel - 1) into level mipLevel with a linear-filtered blit.
        blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel - 1, 0, 1 };
        blitRegion.srcOffsets[0] = { 0, 0, 0 };
        blitRegion.srcOffsets[1] = { prevWidth, prevHeight, 1 };
        blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel, 0, 1 };
        blitRegion.dstOffsets[0] = { 0, 0, 0 };
        blitRegion.dstOffsets[1] = { width, height, 1 };
        fCurrentCmdBuffer->blitImage(this,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                     1,
                                     &blitRegion,
                                     VK_FILTER_LINEAR);
        ++mipLevel;
    }
    if (levelCount > 1) {
        // This barrier logically is not needed, but it changes the final level to the same layout
        // as all the others, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL. This makes tracking of the
        // layouts and future layout changes easier. The alternative here would be to track layout
        // and memory accesses per layer which doesn't seem work it.
        imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
        this->addImageMemoryBarrier(vkTex->resource(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_TRANSFER_BIT, false, &imageMemoryBarrier);
        vkTex->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    }
    return true;
}
1382
Greg Daniel164a9f02016-02-22 09:56:40 -05001383////////////////////////////////////////////////////////////////////////////////
1384
1385GrStencilAttachment* GrVkGpu::createStencilAttachmentForRenderTarget(const GrRenderTarget* rt,
1386 int width,
1387 int height) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001388 SkASSERT(width >= rt->width());
1389 SkASSERT(height >= rt->height());
1390
1391 int samples = rt->numStencilSamples();
1392
Ethan Nicholasf610bae2018-09-20 16:55:21 -04001393 const GrVkCaps::StencilFormat& sFmt = this->vkCaps().preferredStencilFormat();
Greg Daniel164a9f02016-02-22 09:56:40 -05001394
1395 GrVkStencilAttachment* stencil(GrVkStencilAttachment::Create(this,
Greg Daniel164a9f02016-02-22 09:56:40 -05001396 width,
1397 height,
1398 samples,
1399 sFmt));
1400 fStats.incStencilAttachmentCreates();
1401 return stencil;
1402}
1403
1404////////////////////////////////////////////////////////////////////////////////
1405
Brian Salomon52e943a2018-03-13 09:32:39 -04001406bool copy_testing_data(GrVkGpu* gpu, const void* srcData, const GrVkAlloc& alloc,
Robert Phillips646f6372018-09-25 09:31:10 -04001407 size_t bufferOffset, size_t srcRowBytes, size_t dstRowBytes,
1408 size_t trimRowBytes, int h) {
Greg Daniel81df0412018-05-31 13:13:33 -04001409 VkDeviceSize size = dstRowBytes * h;
1410 VkDeviceSize offset = bufferOffset;
1411 SkASSERT(size + offset <= alloc.fSize);
1412 void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);
1413 if (!mapPtr) {
egdaniel3602d4f2016-08-12 11:58:53 -07001414 return false;
1415 }
Greg Daniel81df0412018-05-31 13:13:33 -04001416 mapPtr = reinterpret_cast<char*>(mapPtr) + offset;
egdaniel3602d4f2016-08-12 11:58:53 -07001417
Greg Daniel20ece3a2017-03-28 10:24:43 -04001418 if (srcData) {
1419 // If there is no padding on dst we can do a single memcopy.
1420 // This assumes the srcData comes in with no padding.
Robert Phillips646f6372018-09-25 09:31:10 -04001421 SkRectMemcpy(mapPtr, dstRowBytes, srcData, srcRowBytes, trimRowBytes, h);
Greg Daniel20ece3a2017-03-28 10:24:43 -04001422 } else {
1423 // If there is no srcdata we always copy 0's into the textures so that it is initialized
1424 // with some data.
Robert Phillips646f6372018-09-25 09:31:10 -04001425 memset(mapPtr, 0, dstRowBytes * h);
Greg Daniel20ece3a2017-03-28 10:24:43 -04001426 }
Greg Daniel81df0412018-05-31 13:13:33 -04001427 GrVkMemory::FlushMappedAlloc(gpu, alloc, offset, size);
1428 GrVkMemory::UnmapAlloc(gpu, alloc);
egdaniel3602d4f2016-08-12 11:58:53 -07001429 return true;
1430}
1431
Brian Salomonf865b052018-03-09 09:01:53 -05001432#if GR_TEST_UTILS
// Test-only helper that creates a fully initialized VkImage outside of Skia's normal
// resource tracking. Allocates the image, stages pixel data (or zeros) through a transfer
// buffer, records and submits a one-time command buffer that copies every mip level and
// transitions the image to its final layout, then blocks on a fence until the GPU is done.
// On success fills *info with the image handle, allocation, tiling, layout, format, and
// level count; the caller owns the returned image/memory. Returns false on any failure,
// cleaning up everything created so far (cleanup is repeated inline on each error path).
// Constraints: srcData is only supported for non-mipped, texturable images.
bool GrVkGpu::createTestingOnlyVkImage(GrPixelConfig config, int w, int h, bool texturable,
                                       bool renderable, GrMipMapped mipMapped, const void* srcData,
                                       size_t srcRowBytes, GrVkImageInfo* info) {
    SkASSERT(texturable || renderable);
    if (!texturable) {
        SkASSERT(GrMipMapped::kNo == mipMapped);
        SkASSERT(!srcData);
    }
    VkFormat pixelFormat;
    if (!GrPixelConfigToVkFormat(config, &pixelFormat)) {
        return false;
    }

    if (texturable && !fVkCaps->isConfigTexturable(config)) {
        return false;
    }

    if (renderable && !fVkCaps->isConfigRenderable(config)) {
        return false;
    }

    // Currently we don't support uploading pixel data when mipped.
    if (srcData && GrMipMapped::kYes == mipMapped) {
        return false;
    }

    // Every test image is a transfer src/dst (for upload and readback); sampling and
    // color-attachment usage are added only when requested.
    VkImageUsageFlags usageFlags = 0;
    usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    usageFlags |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    if (texturable) {
        usageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    if (renderable) {
        usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }

    VkImage image = VK_NULL_HANDLE;
    GrVkAlloc alloc;
    // Tracks the image's current layout as the recorded barriers advance it:
    // UNDEFINED -> TRANSFER_DST -> (if texturable) SHADER_READ_ONLY.
    VkImageLayout initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    // Create Image
    VkSampleCountFlagBits vkSamples;
    if (!GrSampleCountToVkSampleCount(1, &vkSamples)) {
        return false;
    }

    // Figure out the number of mip levels.
    uint32_t mipLevels = 1;
    if (GrMipMapped::kYes == mipMapped) {
        mipLevels = SkMipMap::ComputeLevelCount(w, h) + 1;
    }

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,  // sType
        nullptr,                              // pNext
        0,                                    // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                     // VkImageType
        pixelFormat,                          // VkFormat
        {(uint32_t)w, (uint32_t)h, 1},        // VkExtent3D
        mipLevels,                            // mipLevels
        1,                                    // arrayLayers
        vkSamples,                            // samples
        VK_IMAGE_TILING_OPTIMAL,              // VkImageTiling
        usageFlags,                           // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode
        0,                                    // queueFamilyCount
        0,                                    // pQueueFamilyIndices
        initialLayout                         // initialLayout
    };

    GR_VK_CALL_ERRCHECK(this->vkInterface(),
                        CreateImage(this->device(), &imageCreateInfo, nullptr, &image));

    if (!GrVkMemory::AllocAndBindImageMemory(this, image, false, &alloc)) {
        VK_CALL(DestroyImage(this->device(), image, nullptr));
        return false;
    }

    // We need to declare these early so that we can delete them at the end outside of the if block.
    GrVkAlloc bufferAlloc;
    VkBuffer buffer = VK_NULL_HANDLE;

    VkResult err;
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        fCmdPool->vkCommandPool(),                        // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    err = VK_CALL(AllocateCommandBuffers(fDevice, &cmdInfo, &cmdBuffer));
    if (err) {
        GrVkMemory::FreeImageMemory(this, false, alloc);
        VK_CALL(DestroyImage(fDevice, image, nullptr));
        return false;
    }

    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    err = VK_CALL(BeginCommandBuffer(cmdBuffer, &cmdBufferBeginInfo));
    SkASSERT(!err);

    size_t bpp = GrBytesPerPixel(config);
    SkASSERT(w && h);

    const size_t trimRowBytes = w * bpp;
    if (!srcRowBytes) {
        srcRowBytes = trimRowBytes;
    }

    // Lay out every mip level in one staging buffer, recording each level's byte offset.
    SkTArray<size_t> individualMipOffsets(mipLevels);
    individualMipOffsets.push_back(0);
    size_t combinedBufferSize = w * bpp * h;
    if (GrPixelConfigIsCompressed(config)) {
        combinedBufferSize = GrCompressedFormatDataSize(config, w, h);
        bpp = 4; // we have at least this alignment, which will pass the code below
    }
    int currentWidth = w;
    int currentHeight = h;
    // The alignment must be at least 4 bytes and a multiple of the bytes per pixel of the image
    // config. This works with the assumption that the bytes in pixel config is always a power
    // of 2.
    SkASSERT((bpp & (bpp - 1)) == 0);
    const size_t alignmentMask = 0x3 | (bpp - 1);
    for (uint32_t currentMipLevel = 1; currentMipLevel < mipLevels; currentMipLevel++) {
        currentWidth = SkTMax(1, currentWidth / 2);
        currentHeight = SkTMax(1, currentHeight / 2);

        size_t trimmedSize;
        if (GrPixelConfigIsCompressed(config)) {
            trimmedSize = GrCompressedFormatDataSize(config, currentWidth, currentHeight);
        } else {
            trimmedSize = currentWidth * bpp * currentHeight;
        }
        // Round the running offset up to the required alignment before placing this level.
        const size_t alignmentDiff = combinedBufferSize & alignmentMask;
        if (alignmentDiff != 0) {
            combinedBufferSize += alignmentMask - alignmentDiff + 1;
        }
        individualMipOffsets.push_back(combinedBufferSize);
        combinedBufferSize += trimmedSize;
    }

    // Create the CPU-visible staging buffer that holds all mip levels.
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = 0;
    bufInfo.size = combinedBufferSize;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;
    err = VK_CALL(CreateBuffer(fDevice, &bufInfo, nullptr, &buffer));

    if (err) {
        GrVkMemory::FreeImageMemory(this, false, alloc);
        VK_CALL(DestroyImage(fDevice, image, nullptr));
        VK_CALL(EndCommandBuffer(cmdBuffer));
        VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
        return false;
    }

    if (!GrVkMemory::AllocAndBindBufferMemory(this, buffer, GrVkBuffer::kCopyRead_Type, true,
                                              &bufferAlloc)) {
        GrVkMemory::FreeImageMemory(this, false, alloc);
        VK_CALL(DestroyImage(fDevice, image, nullptr));
        VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
        VK_CALL(EndCommandBuffer(cmdBuffer));
        VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
        return false;
    }

    // Fill each level of the staging buffer: level 0 from srcData (if any), others zeroed
    // (asserted: srcData implies a single level).
    currentWidth = w;
    currentHeight = h;
    for (uint32_t currentMipLevel = 0; currentMipLevel < mipLevels; currentMipLevel++) {
        SkASSERT(0 == currentMipLevel || !srcData);
        size_t bufferOffset = individualMipOffsets[currentMipLevel];
        bool result;
        if (GrPixelConfigIsCompressed(config)) {
            size_t levelSize = GrCompressedFormatDataSize(config, currentWidth, currentHeight);
            size_t currentRowBytes = levelSize / currentHeight;
            result = copy_testing_data(this, srcData, bufferAlloc, bufferOffset, currentRowBytes,
                                       currentRowBytes, currentRowBytes, currentHeight);
        } else {
            size_t currentRowBytes = bpp * currentWidth;
            result = copy_testing_data(this, srcData, bufferAlloc, bufferOffset, srcRowBytes,
                                       currentRowBytes, trimRowBytes, currentHeight);
        }
        if (!result) {
            GrVkMemory::FreeImageMemory(this, false, alloc);
            VK_CALL(DestroyImage(fDevice, image, nullptr));
            GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
            VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
            VK_CALL(EndCommandBuffer(cmdBuffer));
            VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
            return false;
        }
        currentWidth = SkTMax(1, currentWidth / 2);
        currentHeight = SkTMax(1, currentHeight / 2);
    }

    // Set image layout and add barrier
    // Transition all mip levels UNDEFINED -> TRANSFER_DST so the buffer copy can write them.
    VkImageMemoryBarrier barrier;
    memset(&barrier, 0, sizeof(VkImageMemoryBarrier));
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.pNext = nullptr;
    barrier.srcAccessMask = GrVkImage::LayoutToSrcAccessMask(initialLayout);
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    barrier.oldLayout = initialLayout;
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mipLevels, 0, 1};

    VK_CALL(CmdPipelineBarrier(cmdBuffer, GrVkImage::LayoutToPipelineSrcStageFlags(initialLayout),
                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                               &barrier));
    initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

    // Record one buffer->image copy region per mip level.
    SkTArray<VkBufferImageCopy> regions(mipLevels);

    currentWidth = w;
    currentHeight = h;
    for (uint32_t currentMipLevel = 0; currentMipLevel < mipLevels; currentMipLevel++) {
        // Submit copy command
        VkBufferImageCopy& region = regions.push_back();
        memset(&region, 0, sizeof(VkBufferImageCopy));
        region.bufferOffset = individualMipOffsets[currentMipLevel];
        region.bufferRowLength = currentWidth;
        region.bufferImageHeight = currentHeight;
        region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
        region.imageOffset = {0, 0, 0};
        region.imageExtent = {(uint32_t)currentWidth, (uint32_t)currentHeight, 1};
        currentWidth = SkTMax(1, currentWidth / 2);
        currentHeight = SkTMax(1, currentHeight / 2);
    }

    VK_CALL(CmdCopyBufferToImage(cmdBuffer, buffer, image, initialLayout, regions.count(),
                                 regions.begin()));

    if (texturable) {
        // Change Image layout to shader read since if we use this texture as a borrowed textures
        // within Ganesh we require that its layout be set to that
        memset(&barrier, 0, sizeof(VkImageMemoryBarrier));
        barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        barrier.pNext = nullptr;
        barrier.srcAccessMask = GrVkImage::LayoutToSrcAccessMask(initialLayout);
        barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        barrier.oldLayout = initialLayout;
        barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        barrier.image = image;
        barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mipLevels, 0, 1};
        VK_CALL(CmdPipelineBarrier(cmdBuffer,
                                   GrVkImage::LayoutToPipelineSrcStageFlags(initialLayout),
                                   VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                   0,
                                   0, nullptr,
                                   0, nullptr,
                                   1, &barrier));
        initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    }

    // End CommandBuffer
    err = VK_CALL(EndCommandBuffer(cmdBuffer));
    SkASSERT(!err);

    // Create Fence for queue
    VkFence fence;
    VkFenceCreateInfo fenceInfo;
    memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;

    err = VK_CALL(CreateFence(fDevice, &fenceInfo, nullptr, &fence));
    SkASSERT(!err);

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = nullptr;
    submitInfo.waitSemaphoreCount = 0;
    submitInfo.pWaitSemaphores = nullptr;
    submitInfo.pWaitDstStageMask = 0;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &cmdBuffer;
    submitInfo.signalSemaphoreCount = 0;
    submitInfo.pSignalSemaphores = nullptr;
    err = VK_CALL(QueueSubmit(this->queue(), 1, &submitInfo, fence));
    SkASSERT(!err);

    // Block until the upload finishes; only VK_TIMEOUT is handled as an error here
    // (this is test-only code, so other failures are covered by the assert below).
    err = VK_CALL(WaitForFences(fDevice, 1, &fence, true, UINT64_MAX));
    if (VK_TIMEOUT == err) {
        GrVkMemory::FreeImageMemory(this, false, alloc);
        VK_CALL(DestroyImage(fDevice, image, nullptr));
        GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
        VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
        VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
        VK_CALL(DestroyFence(fDevice, fence, nullptr));
        SkDebugf("Fence failed to signal: %d\n", err);
        SK_ABORT("failing");
    }
    SkASSERT(!err);

    // Clean up transfer resources
    if (buffer != VK_NULL_HANDLE) { // workaround for an older NVidia driver crash
        GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
        VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
    }
    VK_CALL(FreeCommandBuffers(fDevice, fCmdPool->vkCommandPool(), 1, &cmdBuffer));
    VK_CALL(DestroyFence(fDevice, fence, nullptr));

    // Hand ownership of the image and its memory to the caller.
    info->fImage = image;
    info->fAlloc = alloc;
    info->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
    info->fImageLayout = initialLayout;
    info->fFormat = pixelFormat;
    info->fLevelCount = mipLevels;

    return true;
}
1761
1762GrBackendTexture GrVkGpu::createTestingOnlyBackendTexture(const void* srcData, int w, int h,
Robert Phillips646f6372018-09-25 09:31:10 -04001763 GrColorType colorType,
1764 bool isRenderTarget,
1765 GrMipMapped mipMapped, size_t rowBytes) {
Brian Salomon8a375832018-03-14 10:21:40 -04001766 this->handleDirtyContext();
Robert Phillipsa479f962018-04-10 11:45:40 -04001767
1768 if (w > this->caps()->maxTextureSize() || h > this->caps()->maxTextureSize()) {
1769 return GrBackendTexture();
1770 }
1771
Robert Phillips646f6372018-09-25 09:31:10 -04001772 GrPixelConfig config = GrColorTypeToPixelConfig(colorType, GrSRGBEncoded::kNo);
1773 if (!this->caps()->isConfigTexturable(config)) {
1774 return GrBackendTexture();
1775 }
1776
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001777 GrVkImageInfo info;
Brian Salomon52e943a2018-03-13 09:32:39 -04001778 if (!this->createTestingOnlyVkImage(config, w, h, true, isRenderTarget, mipMapped, srcData,
Robert Phillips646f6372018-09-25 09:31:10 -04001779 rowBytes, &info)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001780 return {};
1781 }
Greg Daniel108bb232018-07-03 16:18:29 -04001782 GrBackendTexture beTex = GrBackendTexture(w, h, info);
1783 // Lots of tests don't go through Skia's public interface which will set the config so for
1784 // testing we make sure we set a config here.
1785 beTex.setPixelConfig(config);
1786 return beTex;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001787}
1788
1789bool GrVkGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
Greg Danielbdf12ad2018-10-12 09:31:11 -04001790 SkASSERT(GrBackendApi::kVulkan == tex.fBackend);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001791
Greg Daniel52e16d92018-04-10 09:34:07 -04001792 GrVkImageInfo backend;
1793 if (!tex.getVkImageInfo(&backend)) {
1794 return false;
1795 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001796
Greg Daniel52e16d92018-04-10 09:34:07 -04001797 if (backend.fImage && backend.fAlloc.fMemory) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001798 VkMemoryRequirements req;
1799 memset(&req, 0, sizeof(req));
1800 GR_VK_CALL(this->vkInterface(), GetImageMemoryRequirements(fDevice,
Greg Daniel52e16d92018-04-10 09:34:07 -04001801 backend.fImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001802 &req));
1803 // TODO: find a better check
1804 // This will probably fail with a different driver
1805 return (req.size > 0) && (req.size <= 8192 * 8192);
1806 }
1807
1808 return false;
1809}
1810
Brian Salomon26102cb2018-03-09 09:33:19 -05001811void GrVkGpu::deleteTestingOnlyBackendTexture(const GrBackendTexture& tex) {
Greg Danielbdf12ad2018-10-12 09:31:11 -04001812 SkASSERT(GrBackendApi::kVulkan == tex.fBackend);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001813
Greg Daniel52e16d92018-04-10 09:34:07 -04001814 GrVkImageInfo info;
1815 if (tex.getVkImageInfo(&info)) {
Greg Daniel52e16d92018-04-10 09:34:07 -04001816 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
Greg Daniel164a9f02016-02-22 09:56:40 -05001817 }
1818}
1819
Brian Osman2d010b62018-08-09 10:55:09 -04001820GrBackendRenderTarget GrVkGpu::createTestingOnlyBackendRenderTarget(int w, int h, GrColorType ct) {
Greg Daniel92cbf3f2018-04-12 16:50:17 -04001821 if (w > this->caps()->maxRenderTargetSize() || h > this->caps()->maxRenderTargetSize()) {
1822 return GrBackendRenderTarget();
1823 }
1824
Brian Salomon8a375832018-03-14 10:21:40 -04001825 this->handleDirtyContext();
Brian Salomon52e943a2018-03-13 09:32:39 -04001826 GrVkImageInfo info;
Brian Osman2d010b62018-08-09 10:55:09 -04001827 auto config = GrColorTypeToPixelConfig(ct, GrSRGBEncoded::kNo);
Brian Salomon52e943a2018-03-13 09:32:39 -04001828 if (kUnknown_GrPixelConfig == config) {
1829 return {};
1830 }
Robert Phillips646f6372018-09-25 09:31:10 -04001831 if (!this->createTestingOnlyVkImage(config, w, h, false, true, GrMipMapped::kNo, nullptr, 0,
Brian Salomon52e943a2018-03-13 09:32:39 -04001832 &info)) {
1833 return {};
1834 }
Greg Daniel108bb232018-07-03 16:18:29 -04001835 GrBackendRenderTarget beRT = GrBackendRenderTarget(w, h, 1, 0, info);
1836 // Lots of tests don't go through Skia's public interface which will set the config so for
1837 // testing we make sure we set a config here.
1838 beRT.setPixelConfig(config);
1839 return beRT;
Brian Salomonf865b052018-03-09 09:01:53 -05001840}
1841
Brian Salomon52e943a2018-03-13 09:32:39 -04001842void GrVkGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget& rt) {
Greg Danielbdf12ad2018-10-12 09:31:11 -04001843 SkASSERT(GrBackendApi::kVulkan == rt.fBackend);
Brian Salomonf865b052018-03-09 09:01:53 -05001844
Greg Daniel323fbcf2018-04-10 13:46:30 -04001845 GrVkImageInfo info;
1846 if (rt.getVkImageInfo(&info)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001847 // something in the command buffer may still be using this, so force submit
1848 this->submitCommandBuffer(kForce_SyncQueue);
Greg Daniel323fbcf2018-04-10 13:46:30 -04001849 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
Brian Salomon52e943a2018-03-13 09:32:39 -04001850 }
1851}
Brian Salomonf865b052018-03-09 09:01:53 -05001852
// Test-only hook: submits all pending recorded work and blocks the CPU until the queue has
// finished executing it.
void GrVkGpu::testingOnly_flushGpuAndSync() {
    this->submitCommandBuffer(kForce_SyncQueue);
}
Brian Salomonf865b052018-03-09 09:01:53 -05001856#endif
Greg Daniel26b50a42018-03-08 09:49:58 -05001857
Greg Daniel164a9f02016-02-22 09:56:40 -05001858////////////////////////////////////////////////////////////////////////////////
1859
Greg Daniel59dc1482019-02-22 10:46:38 -05001860void GrVkGpu::addBufferMemoryBarrier(const GrVkResource* resource,
1861 VkPipelineStageFlags srcStageMask,
Greg Daniel164a9f02016-02-22 09:56:40 -05001862 VkPipelineStageFlags dstStageMask,
1863 bool byRegion,
1864 VkBufferMemoryBarrier* barrier) const {
1865 SkASSERT(fCurrentCmdBuffer);
Greg Daniel59dc1482019-02-22 10:46:38 -05001866 SkASSERT(resource);
Greg Daniel164a9f02016-02-22 09:56:40 -05001867 fCurrentCmdBuffer->pipelineBarrier(this,
Greg Daniel59dc1482019-02-22 10:46:38 -05001868 resource,
Greg Daniel164a9f02016-02-22 09:56:40 -05001869 srcStageMask,
1870 dstStageMask,
1871 byRegion,
1872 GrVkCommandBuffer::kBufferMemory_BarrierType,
1873 barrier);
1874}
1875
Greg Daniel59dc1482019-02-22 10:46:38 -05001876void GrVkGpu::addImageMemoryBarrier(const GrVkResource* resource,
1877 VkPipelineStageFlags srcStageMask,
Greg Daniel164a9f02016-02-22 09:56:40 -05001878 VkPipelineStageFlags dstStageMask,
1879 bool byRegion,
1880 VkImageMemoryBarrier* barrier) const {
1881 SkASSERT(fCurrentCmdBuffer);
Greg Daniel59dc1482019-02-22 10:46:38 -05001882 SkASSERT(resource);
Greg Daniel164a9f02016-02-22 09:56:40 -05001883 fCurrentCmdBuffer->pipelineBarrier(this,
Greg Daniel59dc1482019-02-22 10:46:38 -05001884 resource,
Greg Daniel164a9f02016-02-22 09:56:40 -05001885 srcStageMask,
1886 dstStageMask,
1887 byRegion,
1888 GrVkCommandBuffer::kImageMemory_BarrierType,
1889 barrier);
1890}
1891
Greg Danielbae71212019-03-01 15:24:35 -05001892void GrVkGpu::onFinishFlush(GrSurfaceProxy* proxy, SkSurface::BackendSurfaceAccess access,
Greg Daniele6bfb7d2019-04-17 15:26:11 -04001893 const GrFlushInfo& info) {
Greg Daniel51316782017-08-02 15:10:09 +00001894 // Submit the current command buffer to the Queue. Whether we inserted semaphores or not does
1895 // not effect what we do here.
Greg Danielbae71212019-03-01 15:24:35 -05001896 if (proxy && access == SkSurface::BackendSurfaceAccess::kPresent) {
1897 GrVkImage* image;
1898 SkASSERT(proxy->isInstantiated());
1899 if (GrTexture* tex = proxy->peekTexture()) {
1900 image = static_cast<GrVkTexture*>(tex);
1901 } else {
1902 GrRenderTarget* rt = proxy->peekRenderTarget();
1903 SkASSERT(rt);
1904 image = static_cast<GrVkRenderTarget*>(rt);
1905 }
1906 image->prepareForPresent(this);
1907 }
Greg Daniele6bfb7d2019-04-17 15:26:11 -04001908 if (info.fFlags & kSyncCpu_GrFlushFlag) {
1909 this->submitCommandBuffer(kForce_SyncQueue, info.fFinishedProc, info.fFinishedContext);
Greg Danielbae71212019-03-01 15:24:35 -05001910 } else {
Greg Daniele6bfb7d2019-04-17 15:26:11 -04001911 this->submitCommandBuffer(kSkip_SyncQueue, info.fFinishedProc, info.fFinishedContext);
Greg Danielbae71212019-03-01 15:24:35 -05001912 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001913}
1914
Greg Daniel25af6712018-04-25 10:44:38 -04001915static int get_surface_sample_cnt(GrSurface* surf) {
1916 if (const GrRenderTarget* rt = surf->asRenderTarget()) {
1917 return rt->numColorSamples();
egdaniel17b89252016-04-05 07:23:38 -07001918 }
Greg Daniel25af6712018-04-25 10:44:38 -04001919 return 0;
Greg Daniel164a9f02016-02-22 09:56:40 -05001920}
1921
// Copies srcRect from srcImage into dstImage at dstPoint with vkCmdCopyImage. The caller must
// already have verified GrVkCaps::canCopyImage (asserted below in debug builds); both images are
// transitioned to transfer layouts here.
void GrVkGpu::copySurfaceAsCopyImage(GrSurface* dst, GrSurfaceOrigin dstOrigin,
                                     GrSurface* src, GrSurfaceOrigin srcOrigin,
                                     GrVkImage* dstImage,
                                     GrVkImage* srcImage,
                                     const SkIRect& srcRect,
                                     const SkIPoint& dstPoint) {
#ifdef SK_DEBUG
    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);
    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();
    SkASSERT(this->vkCaps().canCopyImage(dst->config(), dstSampleCnt, dstOrigin, dstHasYcbcr,
                                         src->config(), srcSampleCnt, srcOrigin, srcHasYcbcr));

#endif

    // These flags are for flushing/invalidating caches and for the dst image it doesn't matter if
    // the cache is flushed since it is only being written to.
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    // Flip rect if necessary: bottom-left-origin coordinates must be converted into Vulkan's
    // top-left space before being handed to the copy command.
    SkIRect srcVkRect = srcRect;
    int32_t dstY = dstPoint.fY;

    if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
        // Mixed origins are not handled by this path.
        SkASSERT(kBottomLeft_GrSurfaceOrigin == dstOrigin);
        srcVkRect.fTop = src->height() - srcRect.fBottom;
        srcVkRect.fBottom = src->height() - srcRect.fTop;
        dstY = dst->height() - dstPoint.fY - srcVkRect.height();
    }

    // Single color subresource, base mip level, one layer, depth of 1.
    VkImageCopy copyRegion;
    memset(&copyRegion, 0, sizeof(VkImageCopy));
    copyRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    copyRegion.srcOffset = { srcVkRect.fLeft, srcVkRect.fTop, 0 };
    copyRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    copyRegion.dstOffset = { dstPoint.fX, dstY, 0 };
    copyRegion.extent = { (uint32_t)srcVkRect.width(), (uint32_t)srcVkRect.height(), 1 };

    fCurrentCmdBuffer->copyImage(this,
                                 srcImage,
                                 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 dstImage,
                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1,
                                 &copyRegion);

    // Report the written region in the original (unflipped) coordinates.
    SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
                                        srcRect.width(), srcRect.height());
    this->didWriteToSurface(dst, dstOrigin, &dstRect);
}
1983
// Copies srcRect from srcImage into dstImage at dstPoint with vkCmdBlitImage. Unlike
// copySurfaceAsCopyImage, this path can handle surfaces with differing origins by flipping the
// destination rect. The caller must already have verified GrVkCaps::canCopyAsBlit (asserted
// below in debug builds).
void GrVkGpu::copySurfaceAsBlit(GrSurface* dst, GrSurfaceOrigin dstOrigin,
                                GrSurface* src, GrSurfaceOrigin srcOrigin,
                                GrVkImage* dstImage,
                                GrVkImage* srcImage,
                                const SkIRect& srcRect,
                                const SkIPoint& dstPoint) {
#ifdef SK_DEBUG
    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);
    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();
    SkASSERT(this->vkCaps().canCopyAsBlit(dst->config(), dstSampleCnt, dstImage->isLinearTiled(),
                                          dstHasYcbcr, src->config(), srcSampleCnt,
                                          srcImage->isLinearTiled(), srcHasYcbcr));

#endif
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    srcImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             VK_ACCESS_TRANSFER_READ_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    // Flip rect if necessary: translate Skia-space rects into Vulkan's top-left space.
    SkIRect srcVkRect;
    srcVkRect.fLeft = srcRect.fLeft;
    srcVkRect.fRight = srcRect.fRight;
    SkIRect dstRect;
    dstRect.fLeft = dstPoint.fX;
    dstRect.fRight = dstPoint.fX + srcRect.width();

    if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
        srcVkRect.fTop = src->height() - srcRect.fBottom;
        srcVkRect.fBottom = src->height() - srcRect.fTop;
    } else {
        srcVkRect.fTop = srcRect.fTop;
        srcVkRect.fBottom = srcRect.fBottom;
    }

    if (kBottomLeft_GrSurfaceOrigin == dstOrigin) {
        dstRect.fTop = dst->height() - dstPoint.fY - srcVkRect.height();
    } else {
        dstRect.fTop = dstPoint.fY;
    }
    dstRect.fBottom = dstRect.fTop + srcVkRect.height();

    // If we have different origins, we need to flip the top and bottom of the dst rect so that we
    // get the correct orientation of the copied data.
    if (srcOrigin != dstOrigin) {
        using std::swap;
        swap(dstRect.fTop, dstRect.fBottom);
    }

    // Single color subresource; offsets[1] supplies the exclusive far corner with depth 1.
    VkImageBlit blitRegion;
    memset(&blitRegion, 0, sizeof(VkImageBlit));
    blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    blitRegion.srcOffsets[0] = { srcVkRect.fLeft, srcVkRect.fTop, 0 };
    blitRegion.srcOffsets[1] = { srcVkRect.fRight, srcVkRect.fBottom, 1 };
    blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
    blitRegion.dstOffsets[0] = { dstRect.fLeft, dstRect.fTop, 0 };
    blitRegion.dstOffsets[1] = { dstRect.fRight, dstRect.fBottom, 1 };

    fCurrentCmdBuffer->blitImage(this,
                                 *srcImage,
                                 *dstImage,
                                 1,
                                 &blitRegion,
                                 VK_FILTER_NEAREST); // We never scale so any filter works here

    // Report the written region in the original (unflipped) coordinates.
    dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY, srcRect.width(), srcRect.height());
    this->didWriteToSurface(dst, dstOrigin, &dstRect);
}
2061
// Copies by resolving the src MSAA render target directly into dst. The caller must already have
// verified GrVkCaps::canCopyAsResolve. Rects/points arrive in Skia space; bottom-left-origin
// coordinates are flipped into Vulkan's top-left space before resolving.
void GrVkGpu::copySurfaceAsResolve(GrSurface* dst, GrSurfaceOrigin dstOrigin, GrSurface* src,
                                   GrSurfaceOrigin srcOrigin, const SkIRect& origSrcRect,
                                   const SkIPoint& origDstPoint) {
    GrVkRenderTarget* srcRT = static_cast<GrVkRenderTarget*>(src->asRenderTarget());
    SkIRect srcRect = origSrcRect;
    SkIPoint dstPoint = origDstPoint;
    if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
        // Mixed origins are not handled by this path.
        SkASSERT(kBottomLeft_GrSurfaceOrigin == dstOrigin);
        srcRect = {origSrcRect.fLeft, src->height() - origSrcRect.fBottom,
                   origSrcRect.fRight, src->height() - origSrcRect.fTop};
        dstPoint.fY = dst->height() - dstPoint.fY - srcRect.height();
    }
    this->resolveImage(dst, srcRT, srcRect, dstPoint);
    // Report the written region in the original (unflipped) coordinates.
    SkIRect dstRect = SkIRect::MakeXYWH(origDstPoint.fX, origDstPoint.fY,
                                        srcRect.width(), srcRect.height());
    this->didWriteToSurface(dst, dstOrigin, &dstRect);
}
2079
// Attempts a surface-to-surface copy, trying the available strategies in order of preference:
// MSAA resolve, draw, vkCmdCopyImage, then vkCmdBlitImage. Returns false if no strategy can
// handle the src/dst combination.
bool GrVkGpu::onCopySurface(GrSurface* dst, GrSurfaceOrigin dstOrigin,
                            GrSurface* src, GrSurfaceOrigin srcOrigin,
                            const SkIRect& srcRect, const SkIPoint& dstPoint,
                            bool canDiscardOutsideDstRect) {
#ifdef SK_DEBUG
    if (GrVkRenderTarget* srcRT = static_cast<GrVkRenderTarget*>(src->asRenderTarget())) {
        SkASSERT(!srcRT->wrapsSecondaryCommandBuffer());
    }
    if (GrVkRenderTarget* dstRT = static_cast<GrVkRenderTarget*>(dst->asRenderTarget())) {
        SkASSERT(!dstRT->wrapsSecondaryCommandBuffer());
    }
#endif

    GrPixelConfig dstConfig = dst->config();
    GrPixelConfig srcConfig = src->config();

    int dstSampleCnt = get_surface_sample_cnt(dst);
    int srcSampleCnt = get_surface_sample_cnt(src);

    // Pick the actual VkImage to operate on: for multisampled render targets that is the MSAA
    // image, otherwise the render target or texture itself.
    GrVkImage* dstImage;
    GrVkImage* srcImage;
    GrRenderTarget* dstRT = dst->asRenderTarget();
    if (dstRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
        // We cannot copy into a render target that wraps an externally provided secondary
        // command buffer (no VkImage available).
        if (vkRT->wrapsSecondaryCommandBuffer()) {
            return false;
        }
        dstImage = vkRT->numColorSamples() > 1 ? vkRT->msaaImage() : vkRT;
    } else {
        SkASSERT(dst->asTexture());
        dstImage = static_cast<GrVkTexture*>(dst->asTexture());
    }
    GrRenderTarget* srcRT = src->asRenderTarget();
    if (srcRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(srcRT);
        srcImage = vkRT->numColorSamples() > 1 ? vkRT->msaaImage() : vkRT;
    } else {
        SkASSERT(src->asTexture());
        srcImage = static_cast<GrVkTexture*>(src->asTexture());
    }

    bool dstHasYcbcr = dstImage->ycbcrConversionInfo().isValid();
    bool srcHasYcbcr = srcImage->ycbcrConversionInfo().isValid();

    // Strategy 1: resolve the MSAA source directly into the destination.
    if (this->vkCaps().canCopyAsResolve(dstConfig, dstSampleCnt, dstOrigin, dstHasYcbcr,
                                        srcConfig, srcSampleCnt, srcOrigin, srcHasYcbcr)) {
        this->copySurfaceAsResolve(dst, dstOrigin, src, srcOrigin, srcRect, dstPoint);
        return true;
    }

    // Strategy 2: draw the source into the destination.
    if (this->vkCaps().canCopyAsDraw(dstConfig, SkToBool(dst->asRenderTarget()), dstHasYcbcr,
                                     srcConfig, SkToBool(src->asTexture()), srcHasYcbcr)) {
        SkAssertResult(fCopyManager.copySurfaceAsDraw(this, dst, dstOrigin, src, srcOrigin, srcRect,
                                                      dstPoint, canDiscardOutsideDstRect));
        auto dstRect = srcRect.makeOffset(dstPoint.fX, dstPoint.fY);
        this->didWriteToSurface(dst, dstOrigin, &dstRect);
        return true;
    }

    // Strategy 3: vkCmdCopyImage.
    if (this->vkCaps().canCopyImage(dstConfig, dstSampleCnt, dstOrigin, dstHasYcbcr,
                                    srcConfig, srcSampleCnt, srcOrigin, srcHasYcbcr)) {
        this->copySurfaceAsCopyImage(dst, dstOrigin, src, srcOrigin, dstImage, srcImage,
                                     srcRect, dstPoint);
        return true;
    }

    // Strategy 4: vkCmdBlitImage.
    if (this->vkCaps().canCopyAsBlit(dstConfig, dstSampleCnt, dstImage->isLinearTiled(),
                                     dstHasYcbcr, srcConfig, srcSampleCnt,
                                     srcImage->isLinearTiled(), srcHasYcbcr)) {
        this->copySurfaceAsBlit(dst, dstOrigin, src, srcOrigin, dstImage, srcImage,
                                srcRect, dstPoint);
        return true;
    }

    return false;
}
2156
// Reads back a rectangle of pixels from 'surface' into 'buffer' by copying the image into a
// host-visible transfer buffer and mapping it. Forces a full CPU/GPU sync (the command buffer is
// submitted and waited on), so this is an expensive call.
bool GrVkGpu::onReadPixels(GrSurface* surface, int left, int top, int width, int height,
                           GrColorType dstColorType, void* buffer, size_t rowBytes) {
    // Only same-config readback is supported; no format conversion is performed here.
    if (GrPixelConfigToColorType(surface->config()) != dstColorType) {
        return false;
    }

    GrVkImage* image = nullptr;
    GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(surface->asRenderTarget());
    if (rt) {
        // Reading from render targets that wrap a secondary command buffer is not allowed since
        // it would require us to know the VkImage, which we don't have, as well as need us to
        // stop and start the VkRenderPass which we don't have access to.
        if (rt->wrapsSecondaryCommandBuffer()) {
            return false;
        }
        // resolve the render target if necessary
        switch (rt->getResolveType()) {
            case GrVkRenderTarget::kCantResolve_ResolveType:
                return false;
            case GrVkRenderTarget::kAutoResolves_ResolveType:
                break;
            case GrVkRenderTarget::kCanResolve_ResolveType:
                this->resolveRenderTargetNoFlush(rt);
                break;
            default:
                SK_ABORT("Unknown resolve type");
        }
        image = rt;
    } else {
        image = static_cast<GrVkTexture*>(surface->asTexture());
    }

    if (!image) {
        return false;
    }

    // Skia's RGB_888x color type, which we map to the vulkan R8G8B8_UNORM, expects the data to be
    // 32 bits, but the Vulkan format is only 24. So we first copy the surface into an R8G8B8A8
    // image and then do the read pixels from that.
    sk_sp<GrVkTextureRenderTarget> copySurface;
    if (dstColorType == GrColorType::kRGB_888x && image->imageFormat() == VK_FORMAT_R8G8B8_UNORM) {
        SkASSERT(surface->config() == kRGB_888_GrPixelConfig);

        // Make a new surface that is RGBA to copy the RGB surface into.
        GrSurfaceDesc surfDesc;
        surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
        surfDesc.fWidth = width;
        surfDesc.fHeight = height;
        surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
        surfDesc.fSampleCnt = 1;

        VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                                       VK_IMAGE_USAGE_SAMPLED_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                       VK_IMAGE_USAGE_TRANSFER_DST_BIT;

        GrVkImage::ImageDesc imageDesc;
        imageDesc.fImageType = VK_IMAGE_TYPE_2D;
        imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
        imageDesc.fWidth = width;
        imageDesc.fHeight = height;
        imageDesc.fLevels = 1;
        imageDesc.fSamples = 1;
        imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
        imageDesc.fUsageFlags = usageFlags;
        imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

        copySurface = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(
                this, SkBudgeted::kYes, surfDesc, imageDesc, GrMipMapsStatus::kNotAllocated);
        if (!copySurface) {
            return false;
        }

        int srcSampleCount = 0;
        if (rt) {
            srcSampleCount = rt->numColorSamples();
        }
        bool srcHasYcbcr = image->ycbcrConversionInfo().isValid();
        static const GrSurfaceOrigin kOrigin = kTopLeft_GrSurfaceOrigin;
        // Bail if neither a blit nor a draw can populate the intermediate RGBA surface.
        if (!this->vkCaps().canCopyAsBlit(copySurface->config(), 1, kOrigin, false,
                                          surface->config(), srcSampleCount, kOrigin,
                                          srcHasYcbcr) &&
            !this->vkCaps().canCopyAsDraw(copySurface->config(), false, false,
                                          surface->config(), SkToBool(surface->asTexture()),
                                          srcHasYcbcr)) {
            return false;
        }
        SkIRect srcRect = SkIRect::MakeXYWH(left, top, width, height);
        if (!this->copySurface(copySurface.get(), kOrigin, surface, kOrigin,
                               srcRect, SkIPoint::Make(0,0))) {
            return false;
        }
        // Continue the readback from the intermediate surface, which already holds exactly the
        // requested rect at (0,0).
        top = 0;
        left = 0;
        dstColorType = GrColorType::kRGBA_8888;
        image = copySurface.get();
    }

    // Change layout of our target so it can be used as copy
    image->setImageLayout(this,
                          VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                          VK_ACCESS_TRANSFER_READ_BIT,
                          VK_PIPELINE_STAGE_TRANSFER_BIT,
                          false);

    int bpp = GrColorTypeBytesPerPixel(dstColorType);
    size_t tightRowBytes = bpp * width;

    VkBufferImageCopy region;
    memset(&region, 0, sizeof(VkBufferImageCopy));

    // Some devices require copies to start at (0,0); in that case copy a larger region and skip
    // past the extra rows/columns after mapping.
    bool copyFromOrigin = this->vkCaps().mustDoCopiesFromOrigin();
    if (copyFromOrigin) {
        region.imageOffset = { 0, 0, 0 };
        region.imageExtent = { (uint32_t)(left + width), (uint32_t)(top + height), 1 };
    } else {
        VkOffset3D offset = { left, top, 0 };
        region.imageOffset = offset;
        region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
    }

    size_t transBufferRowBytes = bpp * region.imageExtent.width;
    size_t imageRows = region.imageExtent.height;
    auto transferBuffer = sk_sp<GrVkTransferBuffer>(
            static_cast<GrVkTransferBuffer*>(this->createBuffer(transBufferRowBytes * imageRows,
                                                                GrGpuBufferType::kXferGpuToCpu,
                                                                kStream_GrAccessPattern)
                                                     .release()));

    // Copy the image to a buffer so we can map it to cpu memory
    region.bufferOffset = transferBuffer->offset();
    region.bufferRowLength = 0;  // Forces RowLength to be width. We handle the rowBytes below.
    region.bufferImageHeight = 0;  // Forces height to be tightly packed. Only useful for 3d images.
    region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };

    fCurrentCmdBuffer->copyImageToBuffer(this,
                                         image,
                                         VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                         transferBuffer.get(),
                                         1,
                                         &region);

    // make sure the copy to buffer has finished
    transferBuffer->addMemoryBarrier(this,
                                     VK_ACCESS_TRANSFER_WRITE_BIT,
                                     VK_ACCESS_HOST_READ_BIT,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT,
                                     VK_PIPELINE_STAGE_HOST_BIT,
                                     false);

    // We need to submit the current command buffer to the Queue and make sure it finishes before
    // we can copy the data out of the buffer.
    this->submitCommandBuffer(kForce_SyncQueue);
    void* mappedMemory = transferBuffer->map();
    const GrVkAlloc& transAlloc = transferBuffer->alloc();
    // Invalidate the mapped range so the host sees the freshly written GPU data.
    GrVkMemory::InvalidateMappedAlloc(this, transAlloc, 0, transAlloc.fSize);

    if (copyFromOrigin) {
        // Skip over the rows/columns that were only copied to satisfy the origin restriction.
        uint32_t skipRows = region.imageExtent.height - height;
        mappedMemory = (char*)mappedMemory + transBufferRowBytes * skipRows + bpp * left;
    }

    SkRectMemcpy(buffer, rowBytes, mappedMemory, transBufferRowBytes, tightRowBytes, height);

    transferBuffer->unmap();
    return true;
}
egdaniel066df7c2016-06-08 14:02:27 -07002324
egdaniel27bb2842016-07-07 11:58:35 -07002325// The RenderArea bounds we pass into BeginRenderPass must have a start x value that is a multiple
2326// of the granularity. The width must also be a multiple of the granularity or eaqual to the width
2327// the the entire attachment. Similar requirements for the y and height components.
2328void adjust_bounds_to_granularity(SkIRect* dstBounds, const SkIRect& srcBounds,
2329 const VkExtent2D& granularity, int maxWidth, int maxHeight) {
2330 // Adjust Width
egdanield5797b32016-09-20 12:57:45 -07002331 if ((0 != granularity.width && 1 != granularity.width)) {
2332 // Start with the right side of rect so we know if we end up going pass the maxWidth.
2333 int rightAdj = srcBounds.fRight % granularity.width;
2334 if (rightAdj != 0) {
2335 rightAdj = granularity.width - rightAdj;
2336 }
2337 dstBounds->fRight = srcBounds.fRight + rightAdj;
2338 if (dstBounds->fRight > maxWidth) {
2339 dstBounds->fRight = maxWidth;
2340 dstBounds->fLeft = 0;
2341 } else {
2342 dstBounds->fLeft = srcBounds.fLeft - srcBounds.fLeft % granularity.width;
2343 }
egdaniel27bb2842016-07-07 11:58:35 -07002344 } else {
egdanield5797b32016-09-20 12:57:45 -07002345 dstBounds->fLeft = srcBounds.fLeft;
2346 dstBounds->fRight = srcBounds.fRight;
egdaniel27bb2842016-07-07 11:58:35 -07002347 }
2348
2349 // Adjust height
egdanield5797b32016-09-20 12:57:45 -07002350 if ((0 != granularity.height && 1 != granularity.height)) {
2351 // Start with the bottom side of rect so we know if we end up going pass the maxHeight.
2352 int bottomAdj = srcBounds.fBottom % granularity.height;
2353 if (bottomAdj != 0) {
2354 bottomAdj = granularity.height - bottomAdj;
2355 }
2356 dstBounds->fBottom = srcBounds.fBottom + bottomAdj;
2357 if (dstBounds->fBottom > maxHeight) {
2358 dstBounds->fBottom = maxHeight;
2359 dstBounds->fTop = 0;
2360 } else {
2361 dstBounds->fTop = srcBounds.fTop - srcBounds.fTop % granularity.height;
2362 }
egdaniel27bb2842016-07-07 11:58:35 -07002363 } else {
egdanield5797b32016-09-20 12:57:45 -07002364 dstBounds->fTop = srcBounds.fTop;
2365 dstBounds->fBottom = srcBounds.fBottom;
egdaniel27bb2842016-07-07 11:58:35 -07002366 }
2367}
2368
// Begins a render pass on 'target', executes the given secondary command buffers inside it, and
// ends the pass. 'bounds' is in Skia space; it is flipped for bottom-left origin targets and then
// snapped to the device's render-area granularity before being used as the render area.
void GrVkGpu::submitSecondaryCommandBuffer(const SkTArray<GrVkSecondaryCommandBuffer*>& buffers,
                                           const GrVkRenderPass* renderPass,
                                           const VkClearValue* colorClear,
                                           GrVkRenderTarget* target, GrSurfaceOrigin origin,
                                           const SkIRect& bounds) {
    SkASSERT (!target->wrapsSecondaryCommandBuffer());
    const SkIRect* pBounds = &bounds;
    SkIRect flippedBounds;
    if (kBottomLeft_GrSurfaceOrigin == origin) {
        // Convert the bounds into Vulkan's top-left coordinate space.
        flippedBounds = bounds;
        flippedBounds.fTop = target->height() - bounds.fBottom;
        flippedBounds.fBottom = target->height() - bounds.fTop;
        pBounds = &flippedBounds;
    }

    // The bounds we use for the render pass should be of the granularity supported
    // by the device.
    const VkExtent2D& granularity = renderPass->granularity();
    SkIRect adjustedBounds;
    if ((0 != granularity.width && 1 != granularity.width) ||
        (0 != granularity.height && 1 != granularity.height)) {
        adjust_bounds_to_granularity(&adjustedBounds, *pBounds, granularity,
                                     target->width(), target->height());
        pBounds = &adjustedBounds;
    }

#ifdef SK_DEBUG
    // This code expects the color attachment at index 0 and, if present, stencil at index 1 --
    // matching the clear-value array built below.
    uint32_t index;
    bool result = renderPass->colorAttachmentIndex(&index);
    SkASSERT(result && 0 == index);
    result = renderPass->stencilAttachmentIndex(&index);
    if (result) {
        SkASSERT(1 == index);
    }
#endif
    VkClearValue clears[2];
    clears[0].color = colorClear->color;
    clears[1].depthStencil.depth = 0.0f;
    clears[1].depthStencil.stencil = 0;

    fCurrentCmdBuffer->beginRenderPass(this, renderPass, clears, *target, *pBounds, true);
    for (int i = 0; i < buffers.count(); ++i) {
        fCurrentCmdBuffer->executeCommands(this, buffers[i]);
    }
    fCurrentCmdBuffer->endRenderPass(this);

    // Report the written region in the original (unflipped, unsnapped) coordinates.
    this->didWriteToSurface(target, origin, &bounds);
}
egdaniel9cb63402016-06-23 08:37:05 -07002417
// Hands a finished GrGpuCommandBuffer back to the gpu for submission. The buffer must be one of
// the two cached command buffers owned by this GrVkGpu; it is reset afterwards for reuse.
void GrVkGpu::submit(GrGpuCommandBuffer* buffer) {
    if (buffer->asRTCommandBuffer()) {
        SkASSERT(fCachedRTCommandBuffer.get() == buffer);

        fCachedRTCommandBuffer->submit();
        fCachedRTCommandBuffer->reset();
    } else {
        SkASSERT(fCachedTexCommandBuffer.get() == buffer);

        fCachedTexCommandBuffer->submit();
        fCachedTexCommandBuffer->reset();
    }
}
2431
Greg Daniel6be35232017-03-01 17:01:09 -05002432GrFence SK_WARN_UNUSED_RESULT GrVkGpu::insertFence() {
jvanverth84741b32016-09-30 08:39:02 -07002433 VkFenceCreateInfo createInfo;
2434 memset(&createInfo, 0, sizeof(VkFenceCreateInfo));
2435 createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
2436 createInfo.pNext = nullptr;
2437 createInfo.flags = 0;
2438 VkFence fence = VK_NULL_HANDLE;
Greg Daniel6be35232017-03-01 17:01:09 -05002439
2440 VK_CALL_ERRCHECK(CreateFence(this->device(), &createInfo, nullptr, &fence));
2441 VK_CALL(QueueSubmit(this->queue(), 0, nullptr, fence));
2442
2443 GR_STATIC_ASSERT(sizeof(GrFence) >= sizeof(VkFence));
jvanverth84741b32016-09-30 08:39:02 -07002444 return (GrFence)fence;
2445}
2446
Greg Daniel6be35232017-03-01 17:01:09 -05002447bool GrVkGpu::waitFence(GrFence fence, uint64_t timeout) {
2448 SkASSERT(VK_NULL_HANDLE != (VkFence)fence);
2449
2450 VkResult result = VK_CALL(WaitForFences(this->device(), 1, (VkFence*)&fence, VK_TRUE, timeout));
jvanverth84741b32016-09-30 08:39:02 -07002451 return (VK_SUCCESS == result);
2452}
2453
2454void GrVkGpu::deleteFence(GrFence fence) const {
Greg Daniel6be35232017-03-01 17:01:09 -05002455 VK_CALL(DestroyFence(this->device(), (VkFence)fence, nullptr));
2456}
2457
Greg Daniela5cb7812017-06-16 09:45:32 -04002458sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT GrVkGpu::makeSemaphore(bool isOwned) {
2459 return GrVkSemaphore::Make(this, isOwned);
Greg Daniel6be35232017-03-01 17:01:09 -05002460}
2461
Greg Daniel48661b82018-01-22 16:11:35 -05002462sk_sp<GrSemaphore> GrVkGpu::wrapBackendSemaphore(const GrBackendSemaphore& semaphore,
2463 GrResourceProvider::SemaphoreWrapType wrapType,
2464 GrWrapOwnership ownership) {
2465 return GrVkSemaphore::MakeWrapped(this, semaphore.vkSemaphore(), wrapType, ownership);
Greg Daniela5cb7812017-06-16 09:45:32 -04002466}
2467
Greg Daniel858e12c2018-12-06 11:11:37 -05002468void GrVkGpu::insertSemaphore(sk_sp<GrSemaphore> semaphore) {
Greg Daniel6be35232017-03-01 17:01:09 -05002469 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2470
Greg Daniel48661b82018-01-22 16:11:35 -05002471 GrVkSemaphore::Resource* resource = vkSem->getResource();
2472 if (resource->shouldSignal()) {
Greg Daniel17b7c052018-01-09 13:55:33 -05002473 resource->ref();
2474 fSemaphoresToSignal.push_back(resource);
2475 }
Greg Daniel6be35232017-03-01 17:01:09 -05002476}
2477
Greg Daniel48661b82018-01-22 16:11:35 -05002478void GrVkGpu::waitSemaphore(sk_sp<GrSemaphore> semaphore) {
Greg Daniel6be35232017-03-01 17:01:09 -05002479 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2480
Greg Daniel48661b82018-01-22 16:11:35 -05002481 GrVkSemaphore::Resource* resource = vkSem->getResource();
2482 if (resource->shouldWait()) {
2483 resource->ref();
2484 fSemaphoresToWaitOn.push_back(resource);
2485 }
jvanverth84741b32016-09-30 08:39:02 -07002486}
Brian Osman13dddce2017-05-09 13:19:50 -04002487
2488sk_sp<GrSemaphore> GrVkGpu::prepareTextureForCrossContextUsage(GrTexture* texture) {
2489 SkASSERT(texture);
2490 GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
2491 vkTexture->setImageLayout(this,
2492 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2493 VK_ACCESS_SHADER_READ_BIT,
Greg Danielf7828d02018-10-09 12:01:32 -04002494 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
Brian Osman13dddce2017-05-09 13:19:50 -04002495 false);
2496 this->submitCommandBuffer(kSkip_SyncQueue);
2497
2498 // The image layout change serves as a barrier, so no semaphore is needed
2499 return nullptr;
2500}
Greg Danielf5d87582017-12-18 14:48:15 -05002501
Greg Daniel64cc9aa2018-10-19 13:54:56 -04002502void GrVkGpu::addDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
2503 fDrawables.emplace_back(std::move(drawable));
2504}
2505
Greg Daniel7a82edf2018-12-04 10:54:34 -05002506uint32_t GrVkGpu::getExtraSamplerKeyForProgram(const GrSamplerState& samplerState,
2507 const GrBackendFormat& format) {
2508 const GrVkYcbcrConversionInfo* ycbcrInfo = format.getVkYcbcrConversionInfo();
2509 SkASSERT(ycbcrInfo);
2510 if (!ycbcrInfo->isValid()) {
2511 return 0;
2512 }
2513
2514 const GrVkSampler* sampler = this->resourceProvider().findOrCreateCompatibleSampler(
2515 samplerState, *ycbcrInfo);
2516
2517 return sampler->uniqueID();
2518}
2519
Greg Daniela870b462019-01-08 15:49:46 -05002520void GrVkGpu::storeVkPipelineCacheData() {
Robert Phillips9da87e02019-02-04 13:26:26 -05002521 if (this->getContext()->priv().getPersistentCache()) {
Greg Daniela870b462019-01-08 15:49:46 -05002522 this->resourceProvider().storePipelineCacheData();
2523 }
2524}