blob: 3aa82868f437c721222c68ae63870df1c674f885 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrVkGpu.h"
9
Greg Daniela5cb7812017-06-16 09:45:32 -040010#include "GrBackendSemaphore.h"
Greg Daniel7ef28f32017-04-20 16:41:55 +000011#include "GrBackendSurface.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050012#include "GrContextOptions.h"
13#include "GrGeometryProcessor.h"
14#include "GrGpuResourceCacheAccess.h"
egdaniel0e1853c2016-03-17 11:35:45 -070015#include "GrMesh.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050016#include "GrPipeline.h"
17#include "GrRenderTargetPriv.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050018#include "GrTexturePriv.h"
Greg Daniel81df0412018-05-31 13:13:33 -040019#include "GrVkAMDMemoryAllocator.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050020#include "GrVkCommandBuffer.h"
egdaniel066df7c2016-06-08 14:02:27 -070021#include "GrVkGpuCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050022#include "GrVkImage.h"
23#include "GrVkIndexBuffer.h"
Greg Danield3e65aa2018-08-01 09:19:45 -040024#include "GrVkInterface.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050025#include "GrVkMemory.h"
26#include "GrVkPipeline.h"
egdaniel22281c12016-03-23 13:49:40 -070027#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050028#include "GrVkRenderPass.h"
29#include "GrVkResourceProvider.h"
Greg Daniel6be35232017-03-01 17:01:09 -050030#include "GrVkSemaphore.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050031#include "GrVkTexture.h"
32#include "GrVkTextureRenderTarget.h"
33#include "GrVkTransferBuffer.h"
34#include "GrVkVertexBuffer.h"
Matt Sarett485c4992017-02-14 14:18:27 -050035#include "SkConvertPixels.h"
jvanverth900bd4a2016-04-29 13:53:12 -070036#include "SkMipMap.h"
Hal Canaryc640d0d2018-06-13 09:59:02 -040037#include "SkSLCompiler.h"
38#include "SkTo.h"
Greg Daniel98bffae2018-08-01 13:25:41 -040039
Greg Daniela31f4e52018-08-01 16:48:52 -040040#include "vk/GrVkExtensions.h"
jvanverthfd359ca2016-03-18 11:57:24 -070041#include "vk/GrVkTypes.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050042
Ben Wagnerf08d1d02018-06-18 15:11:00 -040043#include <utility>
44
Forrest Reiling44f85712017-03-27 23:22:20 -070045#if !defined(SK_BUILD_FOR_WIN)
46#include <unistd.h>
47#endif // !defined(SK_BUILD_FOR_WIN)
48
Greg Danieldef55462018-08-01 13:40:14 -040049#if defined(SK_BUILD_FOR_WIN) && defined(SK_DEBUG)
50#include "SkLeanWindows.h"
51#endif
52
Greg Daniel164a9f02016-02-22 09:56:40 -050053#define VK_CALL(X) GR_VK_CALL(this->vkInterface(), X)
54#define VK_CALL_RET(RET, X) GR_VK_CALL_RET(this->vkInterface(), RET, X)
55#define VK_CALL_ERRCHECK(X) GR_VK_CALL_ERRCHECK(this->vkInterface(), X)
56
Greg Danielf730c182018-07-02 20:15:37 +000057sk_sp<GrGpu> GrVkGpu::Make(const GrVkBackendContext& backendContext,
Brian Salomon384fab42017-12-07 12:33:05 -050058 const GrContextOptions& options, GrContext* context) {
Greg Danielf730c182018-07-02 20:15:37 +000059 if (backendContext.fInstance == VK_NULL_HANDLE ||
60 backendContext.fPhysicalDevice == VK_NULL_HANDLE ||
61 backendContext.fDevice == VK_NULL_HANDLE ||
62 backendContext.fQueue == VK_NULL_HANDLE) {
63 return nullptr;
64 }
Greg Danield3e65aa2018-08-01 09:19:45 -040065 if (!backendContext.fGetProc) {
66 return nullptr;
Greg Danielc8cd45a2018-07-12 10:02:37 -040067 }
Greg Danield3e65aa2018-08-01 09:19:45 -040068
Greg Danielc0b03d82018-08-03 14:41:15 -040069 PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
70 reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(
71 backendContext.fGetProc("vkGetPhysicalDeviceProperties",
72 backendContext.fInstance,
73 VK_NULL_HANDLE));
74
75 if (!localGetPhysicalDeviceProperties) {
76 return nullptr;
77 }
78 VkPhysicalDeviceProperties physDeviceProperties;
79 localGetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &physDeviceProperties);
80 uint32_t physDevVersion = physDeviceProperties.apiVersion;
81
Greg Daniel98bffae2018-08-01 13:25:41 -040082 sk_sp<const GrVkInterface> interface;
Greg Danield3e65aa2018-08-01 09:19:45 -040083
Greg Daniel98bffae2018-08-01 13:25:41 -040084 if (backendContext.fVkExtensions) {
85 interface.reset(new GrVkInterface(backendContext.fGetProc,
86 backendContext.fInstance,
87 backendContext.fDevice,
Greg Danielc0b03d82018-08-03 14:41:15 -040088 backendContext.fInstanceVersion,
89 physDevVersion,
Greg Daniel98bffae2018-08-01 13:25:41 -040090 backendContext.fVkExtensions));
Greg Danielc0b03d82018-08-03 14:41:15 -040091 if (!interface->validate(backendContext.fInstanceVersion, physDevVersion,
92 backendContext.fVkExtensions)) {
Greg Daniel98bffae2018-08-01 13:25:41 -040093 return nullptr;
94 }
95 } else {
96 // None of our current GrVkExtension flags actually affect the vulkan backend so we just
97 // make an empty GrVkExtensions and pass that to the GrVkInterface.
98 GrVkExtensions extensions;
99 interface.reset(new GrVkInterface(backendContext.fGetProc,
100 backendContext.fInstance,
101 backendContext.fDevice,
Greg Danielc0b03d82018-08-03 14:41:15 -0400102 backendContext.fInstanceVersion,
103 physDevVersion,
Greg Daniel98bffae2018-08-01 13:25:41 -0400104 &extensions));
Greg Danielc0b03d82018-08-03 14:41:15 -0400105 if (!interface->validate(backendContext.fInstanceVersion, physDevVersion, &extensions)) {
Greg Daniel98bffae2018-08-01 13:25:41 -0400106 return nullptr;
107 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500108 }
109
Greg Danielc8cd45a2018-07-12 10:02:37 -0400110 return sk_sp<GrGpu>(new GrVkGpu(context, options, backendContext, interface));
Greg Daniel164a9f02016-02-22 09:56:40 -0500111}
112
113////////////////////////////////////////////////////////////////////////////////
114
// Constructor: adopts the (already validated) backend context's Vulkan handles, builds
// the caps object from whichever feature description the client provided, and creates
// the command pool plus the first primary command buffer.
GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,
                 const GrVkBackendContext& backendContext, sk_sp<const GrVkInterface> interface)
        : INHERITED(context)
        , fInterface(std::move(interface))
        , fMemoryAllocator(backendContext.fMemoryAllocator)
        , fInstance(backendContext.fInstance)
        , fPhysicalDevice(backendContext.fPhysicalDevice)
        , fDevice(backendContext.fDevice)
        , fQueue(backendContext.fQueue)
        , fQueueIndex(backendContext.fGraphicsQueueIndex)
        , fResourceProvider(this)
        , fDisconnected(false) {
    // This GrVkGpu never owns the instance/device; the client must keep them alive.
    SkASSERT(!backendContext.fOwnsInstanceAndDevice);

    if (!fMemoryAllocator) {
        // We were not given a memory allocator at creation; fall back to the AMD VMA-based one.
        fMemoryAllocator.reset(new GrVkAMDMemoryAllocator(backendContext.fPhysicalDevice,
                                                          fDevice, fInterface));
    }

    fCompiler = new SkSL::Compiler();

    // Prefer the explicit instance version; fall back to the legacy fMinAPIVersion field.
    uint32_t instanceVersion = backendContext.fInstanceVersion ? backendContext.fInstanceVersion
                                                               : backendContext.fMinAPIVersion;

    // Three ways the client may have described device features, newest-first:
    // a full VkPhysicalDeviceFeatures2, a plain VkPhysicalDeviceFeatures, or legacy
    // GrVkFeatureFlag bits (converted into a zeroed features2 struct below).
    // NOTE(review): the first two branches dereference backendContext.fVkExtensions without a
    // null check — presumably the contract is that clients supplying features also supply
    // extensions; confirm against callers.
    if (backendContext.fDeviceFeatures2) {
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   *backendContext.fDeviceFeatures2, instanceVersion,
                                   *backendContext.fVkExtensions));
    } else if (backendContext.fDeviceFeatures) {
        VkPhysicalDeviceFeatures2 features2;
        features2.pNext = nullptr;
        features2.features = *backendContext.fDeviceFeatures;
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features2, instanceVersion, *backendContext.fVkExtensions));
    } else {
        VkPhysicalDeviceFeatures2 features;
        memset(&features, 0, sizeof(VkPhysicalDeviceFeatures2));
        features.pNext = nullptr;
        if (backendContext.fFeatures & kGeometryShader_GrVkFeatureFlag) {
            features.features.geometryShader = true;
        }
        if (backendContext.fFeatures & kDualSrcBlend_GrVkFeatureFlag) {
            features.features.dualSrcBlend = true;
        }
        if (backendContext.fFeatures & kSampleRateShading_GrVkFeatureFlag) {
            features.features.sampleRateShading = true;
        }
        fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
                                   features, instanceVersion, GrVkExtensions()));
    }
    // fCaps shares ownership of the same caps object under its base-class type.
    fCaps.reset(SkRef(fVkCaps.get()));

    VK_CALL(GetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &fPhysDevProps));
    VK_CALL(GetPhysicalDeviceMemoryProperties(backendContext.fPhysicalDevice, &fPhysDevMemProps));

    const VkCommandPoolCreateInfo cmdPoolInfo = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,  // sType
        nullptr,                                     // pNext
        VK_COMMAND_POOL_CREATE_TRANSIENT_BIT |
        VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,  // CmdPoolCreateFlags
        backendContext.fGraphicsQueueIndex,          // queueFamilyIndex
    };
    GR_VK_CALL_ERRCHECK(this->vkInterface(), CreateCommandPool(fDevice, &cmdPoolInfo, nullptr,
                                                               &fCmdPool));

    // must call this after creating the CommandPool
    fResourceProvider.init();
    fCurrentCmdBuffer = fResourceProvider.findOrCreatePrimaryCommandBuffer();
    SkASSERT(fCurrentCmdBuffer);
    fCurrentCmdBuffer->begin(this);
}
187
// Orderly teardown of everything this GrVkGpu created: end/release the current command
// buffer, drain the queue, release pending semaphores, then destroy provider-held
// resources, the command pool, and finally drop our handle copies. The ordering of these
// steps is deliberate — see the inline comments.
void GrVkGpu::destroyResources() {
    if (fCurrentCmdBuffer) {
        fCurrentCmdBuffer->end(this);
        fCurrentCmdBuffer->unref(this);
    }

    // wait for all commands to finish
    fResourceProvider.checkCommandBuffers();
    VkResult res = VK_CALL(QueueWaitIdle(fQueue));

    // On windows, sometimes calls to QueueWaitIdle return before actually signalling the fences
    // on the command buffers even though they have completed. This causes an assert to fire when
    // destroying the command buffers. Currently this only seems to happen on windows, so we add a
    // sleep to make sure the fence signals.
#ifdef SK_DEBUG
    if (this->vkCaps().mustSleepOnTearDown()) {
#if defined(SK_BUILD_FOR_WIN)
        Sleep(10); // In milliseconds
#else
        sleep(1); // In seconds
#endif
    }
#endif

#ifdef SK_DEBUG
    // Device loss is tolerated here; it is forwarded to destroyResources() below.
    SkASSERT(VK_SUCCESS == res || VK_ERROR_DEVICE_LOST == res);
#endif

    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();

    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();


    fCopyManager.destroyResources(this);

    // must call this just before we destroy the command pool and VkDevice
    fResourceProvider.destroyResources(VK_ERROR_DEVICE_LOST == res);

    if (fCmdPool != VK_NULL_HANDLE) {
        VK_CALL(DestroyCommandPool(fDevice, fCmdPool, nullptr));
    }

    fMemoryAllocator.reset();

    // Clear our copies of the handles; the client owns and destroys the real objects.
    fQueue = VK_NULL_HANDLE;
    fDevice = VK_NULL_HANDLE;
    fInstance = VK_NULL_HANDLE;
}
242
243GrVkGpu::~GrVkGpu() {
244 if (!fDisconnected) {
245 this->destroyResources();
246 }
247 delete fCompiler;
248}
249
250
// Disconnects this gpu from its context. kCleanup performs a full orderly teardown;
// otherwise (device already unusable) every held object is abandoned without touching
// the Vulkan API. Idempotent via fDisconnected.
void GrVkGpu::disconnect(DisconnectType type) {
    INHERITED::disconnect(type);
    if (!fDisconnected) {
        if (DisconnectType::kCleanup == type) {
            this->destroyResources();
        } else {
            if (fCurrentCmdBuffer) {
                fCurrentCmdBuffer->unrefAndAbandon();
            }
            for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
                fSemaphoresToWaitOn[i]->unrefAndAbandon();
            }
            for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
                fSemaphoresToSignal[i]->unrefAndAbandon();
            }
            fCopyManager.abandonResources();

            // must call this just before we destroy the command pool and VkDevice
            fResourceProvider.abandonResources();

            fMemoryAllocator.reset();
        }
        // Common to both paths: drop bookkeeping state so no further work is attempted.
        fSemaphoresToWaitOn.reset();
        fSemaphoresToSignal.reset();
        fCurrentCmdBuffer = nullptr;
        fCmdPool = VK_NULL_HANDLE;
        fDisconnected = true;
    }
}
280
281///////////////////////////////////////////////////////////////////////////////
282
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400283GrGpuRTCommandBuffer* GrVkGpu::getCommandBuffer(
Robert Phillips95214472017-08-08 18:00:03 -0400284 GrRenderTarget* rt, GrSurfaceOrigin origin,
Greg Daniel500d58b2017-08-24 15:59:33 -0400285 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
286 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400287 if (!fCachedRTCommandBuffer) {
288 fCachedRTCommandBuffer.reset(new GrVkGpuRTCommandBuffer(this));
289 }
290
Greg Daniela41a74a2018-10-09 12:59:23 +0000291 fCachedRTCommandBuffer->set(rt, origin, colorInfo, stencilInfo);
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400292 return fCachedRTCommandBuffer.get();
Greg Daniel500d58b2017-08-24 15:59:33 -0400293}
294
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400295GrGpuTextureCommandBuffer* GrVkGpu::getCommandBuffer(GrTexture* texture, GrSurfaceOrigin origin) {
296 if (!fCachedTexCommandBuffer) {
297 fCachedTexCommandBuffer.reset(new GrVkGpuTextureCommandBuffer(this));
298 }
299
300 fCachedTexCommandBuffer->set(texture, origin);
301 return fCachedTexCommandBuffer.get();
egdaniel066df7c2016-06-08 14:02:27 -0700302}
303
// Ends and submits the current primary command buffer (with any accumulated wait/signal
// semaphores), releases those semaphores, then acquires and begins a fresh primary
// command buffer so recording can continue immediately.
void GrVkGpu::submitCommandBuffer(SyncQueue sync) {
    SkASSERT(fCurrentCmdBuffer);
    fCurrentCmdBuffer->end(this);

    fCurrentCmdBuffer->submitToQueue(this, fQueue, sync, fSemaphoresToSignal, fSemaphoresToWaitOn);

    // The submit has consumed the semaphores; drop our references.
    for (int i = 0; i < fSemaphoresToWaitOn.count(); ++i) {
        fSemaphoresToWaitOn[i]->unref(this);
    }
    fSemaphoresToWaitOn.reset();
    for (int i = 0; i < fSemaphoresToSignal.count(); ++i) {
        fSemaphoresToSignal[i]->unref(this);
    }
    fSemaphoresToSignal.reset();

    // Let the resource provider reclaim any command buffers whose fences have signaled.
    fResourceProvider.checkCommandBuffers();

    // Release old command buffer and create a new one
    fCurrentCmdBuffer->unref(this);
    fCurrentCmdBuffer = fResourceProvider.findOrCreatePrimaryCommandBuffer();
    SkASSERT(fCurrentCmdBuffer);

    fCurrentCmdBuffer->begin(this);
}
328
329///////////////////////////////////////////////////////////////////////////////
cdalton1bf3e712016-04-19 10:00:02 -0700330GrBuffer* GrVkGpu::onCreateBuffer(size_t size, GrBufferType type, GrAccessPattern accessPattern,
331 const void* data) {
332 GrBuffer* buff;
cdalton397536c2016-03-25 12:15:03 -0700333 switch (type) {
334 case kVertex_GrBufferType:
335 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
336 kStatic_GrAccessPattern == accessPattern);
cdalton1bf3e712016-04-19 10:00:02 -0700337 buff = GrVkVertexBuffer::Create(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700338 break;
cdalton397536c2016-03-25 12:15:03 -0700339 case kIndex_GrBufferType:
340 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
341 kStatic_GrAccessPattern == accessPattern);
cdalton1bf3e712016-04-19 10:00:02 -0700342 buff = GrVkIndexBuffer::Create(this, size, kDynamic_GrAccessPattern == accessPattern);
egdaniele05bbbb2016-04-19 12:13:41 -0700343 break;
cdalton397536c2016-03-25 12:15:03 -0700344 case kXferCpuToGpu_GrBufferType:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400345 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
346 kStream_GrAccessPattern == accessPattern);
cdalton1bf3e712016-04-19 10:00:02 -0700347 buff = GrVkTransferBuffer::Create(this, size, GrVkBuffer::kCopyRead_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700348 break;
cdalton397536c2016-03-25 12:15:03 -0700349 case kXferGpuToCpu_GrBufferType:
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400350 SkASSERT(kDynamic_GrAccessPattern == accessPattern ||
351 kStream_GrAccessPattern == accessPattern);
cdalton1bf3e712016-04-19 10:00:02 -0700352 buff = GrVkTransferBuffer::Create(this, size, GrVkBuffer::kCopyWrite_Type);
egdaniele05bbbb2016-04-19 12:13:41 -0700353 break;
Greg Danielc2dd5ed2017-05-05 13:49:11 -0400354 case kDrawIndirect_GrBufferType:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400355 SK_ABORT("DrawIndirect Buffers not supported in vulkan backend.");
Greg Danielc2dd5ed2017-05-05 13:49:11 -0400356 return nullptr;
cdalton397536c2016-03-25 12:15:03 -0700357 default:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400358 SK_ABORT("Unknown buffer type.");
cdalton397536c2016-03-25 12:15:03 -0700359 return nullptr;
360 }
cdalton1bf3e712016-04-19 10:00:02 -0700361 if (data && buff) {
362 buff->updateData(data, size);
363 }
364 return buff;
Greg Daniel164a9f02016-02-22 09:56:40 -0500365}
366
// Uploads pixel data (base level, plus mips for optimally-tiled textures) into the
// texture backing `surface`. Linear-tiled textures take the host-write path; optimal
// tiling goes through a staging-buffer upload.
bool GrVkGpu::onWritePixels(GrSurface* surface, int left, int top, int width, int height,
                            GrColorType srcColorType, const GrMipLevel texels[],
                            int mipLevelCount) {
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(surface->asTexture());
    if (!vkTex) {
        return false;
    }

    // Make sure we have at least the base level
    if (!mipLevelCount || !texels[0].fPixels) {
        return false;
    }

    bool success = false;
    bool linearTiling = vkTex->isLinearTiled();
    if (linearTiling) {
        // Linear-tiled images only support a single level here.
        if (mipLevelCount > 1) {
            SkDebugf("Can't upload mipmap data to linear tiled texture");
            return false;
        }
        if (VK_IMAGE_LAYOUT_PREINITIALIZED != vkTex->currentLayout()) {
            // Need to change the layout to general in order to perform a host write
            vkTex->setImageLayout(this,
                                  VK_IMAGE_LAYOUT_GENERAL,
                                  VK_ACCESS_HOST_WRITE_BIT,
                                  VK_PIPELINE_STAGE_HOST_BIT,
                                  false);
            // Force the layout transition to execute before the host write below.
            this->submitCommandBuffer(kForce_SyncQueue);
        }
        success = this->uploadTexDataLinear(vkTex, left, top, width, height, srcColorType,
                                            texels[0].fPixels, texels[0].fRowBytes);
    } else {
        SkASSERT(mipLevelCount <= vkTex->texturePriv().maxMipMapLevel() + 1);
        success = this->uploadTexDataOptimal(vkTex, left, top, width, height, srcColorType, texels,
                                             mipLevelCount);
    }

    return success;
}
406
Brian Salomonc320b152018-02-20 14:05:36 -0500407bool GrVkGpu::onTransferPixels(GrTexture* texture, int left, int top, int width, int height,
408 GrColorType bufferColorType, GrBuffer* transferBuffer,
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400409 size_t bufferOffset, size_t rowBytes) {
410 // Vulkan only supports 4-byte aligned offsets
411 if (SkToBool(bufferOffset & 0x2)) {
412 return false;
413 }
414 GrVkTexture* vkTex = static_cast<GrVkTexture*>(texture);
415 if (!vkTex) {
416 return false;
417 }
418 GrVkTransferBuffer* vkBuffer = static_cast<GrVkTransferBuffer*>(transferBuffer);
419 if (!vkBuffer) {
420 return false;
421 }
422
Greg Daniel660cc992017-06-26 14:55:05 -0400423 SkDEBUGCODE(
424 SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
425 SkIRect bounds = SkIRect::MakeWH(texture->width(), texture->height());
426 SkASSERT(bounds.contains(subRect));
427 )
Brian Salomonc320b152018-02-20 14:05:36 -0500428 int bpp = GrColorTypeBytesPerPixel(bufferColorType);
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400429 if (rowBytes == 0) {
Brian Salomonc320b152018-02-20 14:05:36 -0500430 rowBytes = bpp * width;
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400431 }
432
433 // Set up copy region
434 VkBufferImageCopy region;
435 memset(&region, 0, sizeof(VkBufferImageCopy));
436 region.bufferOffset = bufferOffset;
437 region.bufferRowLength = (uint32_t)(rowBytes/bpp);
438 region.bufferImageHeight = 0;
439 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
440 region.imageOffset = { left, top, 0 };
441 region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
442
443 // Change layout of our target so it can be copied to
444 vkTex->setImageLayout(this,
445 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
446 VK_ACCESS_TRANSFER_WRITE_BIT,
447 VK_PIPELINE_STAGE_TRANSFER_BIT,
448 false);
449
450 // Copy the buffer to the image
451 fCurrentCmdBuffer->copyBufferToImage(this,
452 vkBuffer,
453 vkTex,
454 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
455 1,
456 &region);
457
Greg Daniel0fc4d2d2017-10-12 11:23:36 -0400458 vkTex->texturePriv().markMipMapsDirty();
Jim Van Verth2e5eaf02017-06-21 15:55:46 -0400459 return true;
460}
461
// Records a vkCmdResolveImage from `src`'s MSAA image into `dst` (its render target
// image if it has one, else its texture image), transitioning both images into the
// required transfer layouts first.
void GrVkGpu::resolveImage(GrSurface* dst, GrVkRenderTarget* src, const SkIRect& srcRect,
                           const SkIPoint& dstPoint) {
    SkASSERT(dst);
    SkASSERT(src && src->numColorSamples() > 1 && src->msaaImage());

    // Some drivers need prior work flushed before a copy-family operation; see caps.
    if (this->vkCaps().mustSubmitCommandsBeforeCopyOp()) {
        this->submitCommandBuffer(GrVkGpu::kSkip_SyncQueue);
    }

    VkImageResolve resolveInfo;
    resolveInfo.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.srcOffset = {srcRect.fLeft, srcRect.fTop, 0};
    resolveInfo.dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    resolveInfo.dstOffset = {dstPoint.fX, dstPoint.fY, 0};
    resolveInfo.extent = {(uint32_t)srcRect.width(), (uint32_t)srcRect.height(), 1};

    // Pick the GrVkImage that backs the destination surface.
    GrVkImage* dstImage;
    GrRenderTarget* dstRT = dst->asRenderTarget();
    if (dstRT) {
        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
        dstImage = vkRT;
    } else {
        SkASSERT(dst->asTexture());
        dstImage = static_cast<GrVkTexture*>(dst->asTexture());
    }
    dstImage->setImageLayout(this,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             VK_ACCESS_TRANSFER_WRITE_BIT,
                             VK_PIPELINE_STAGE_TRANSFER_BIT,
                             false);

    src->msaaImage()->setImageLayout(this,
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_ACCESS_TRANSFER_READ_BIT,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT,
                                     false);

    fCurrentCmdBuffer->resolveImage(this, *src->msaaImage(), *dstImage, 1, &resolveInfo);
}
501
Brian Salomon1fabd512018-02-09 09:54:25 -0500502void GrVkGpu::internalResolveRenderTarget(GrRenderTarget* target, bool requiresSubmit) {
egdaniel66933552016-08-24 07:22:19 -0700503 if (target->needsResolve()) {
504 SkASSERT(target->numColorSamples() > 1);
egdaniel52ad2512016-08-04 12:50:01 -0700505 GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(target);
506 SkASSERT(rt->msaaImage());
Greg Daniel69d49922017-02-23 09:44:02 -0500507
egdaniel4bcd62e2016-08-31 07:37:31 -0700508 const SkIRect& srcRect = rt->getResolveRect();
egdaniel52ad2512016-08-04 12:50:01 -0700509
Brian Salomon1fabd512018-02-09 09:54:25 -0500510 this->resolveImage(target, rt, srcRect, SkIPoint::Make(srcRect.fLeft, srcRect.fTop));
egdaniel52ad2512016-08-04 12:50:01 -0700511
512 rt->flagAsResolved();
Greg Daniel69d49922017-02-23 09:44:02 -0500513
514 if (requiresSubmit) {
515 this->submitCommandBuffer(kSkip_SyncQueue);
516 }
egdaniel52ad2512016-08-04 12:50:01 -0700517 }
518}
519
// Host-writes pixel data directly into a linear-tiled texture's mapped memory.
// The texture must be in PREINITIALIZED or GENERAL layout (host-writable layouts).
// Returns false only if mapping the allocation fails.
bool GrVkGpu::uploadTexDataLinear(GrVkTexture* tex, int left, int top, int width, int height,
                                  GrColorType dataColorType, const void* data, size_t rowBytes) {
    SkASSERT(data);
    SkASSERT(tex->isLinearTiled());

    SkDEBUGCODE(
        SkIRect subRect = SkIRect::MakeXYWH(left, top, width, height);
        SkIRect bounds = SkIRect::MakeWH(tex->width(), tex->height());
        SkASSERT(bounds.contains(subRect));
    )
    int bpp = GrColorTypeBytesPerPixel(dataColorType);
    size_t trimRowBytes = width * bpp;
    if (!rowBytes) {
        // Zero rowBytes means the source is tightly packed.
        rowBytes = trimRowBytes;
    }

    SkASSERT(VK_IMAGE_LAYOUT_PREINITIALIZED == tex->currentLayout() ||
             VK_IMAGE_LAYOUT_GENERAL == tex->currentLayout());
    const VkImageSubresource subres = {
        VK_IMAGE_ASPECT_COLOR_BIT,
        0,  // mipLevel
        0,  // arraySlice
    };
    VkSubresourceLayout layout;

    const GrVkInterface* interface = this->vkInterface();

    // Query the driver for the image's row pitch so we can address the mapped memory.
    GR_VK_CALL(interface, GetImageSubresourceLayout(fDevice,
                                                    tex->image(),
                                                    &subres,
                                                    &layout));

    const GrVkAlloc& alloc = tex->alloc();
    // Byte offset of the destination sub-rect's top-left pixel within the image memory.
    VkDeviceSize offset = top * layout.rowPitch + left * bpp;
    VkDeviceSize size = height*layout.rowPitch;
    SkASSERT(size + offset <= alloc.fSize);
    void* mapPtr = GrVkMemory::MapAlloc(this, alloc);
    if (!mapPtr) {
        return false;
    }
    mapPtr = reinterpret_cast<char*>(mapPtr) + offset;

    // Row-by-row copy honoring both the source rowBytes and the image's rowPitch.
    SkRectMemcpy(mapPtr, static_cast<size_t>(layout.rowPitch), data, rowBytes, trimRowBytes,
                 height);

    // Flush before unmapping so the write is visible to the device (non-coherent memory).
    GrVkMemory::FlushMappedAlloc(this, alloc, offset, size);
    GrVkMemory::UnmapAlloc(this, alloc);

    return true;
}
570
Brian Salomona9b04b92018-06-01 15:04:28 -0400571bool GrVkGpu::uploadTexDataOptimal(GrVkTexture* tex, int left, int top, int width, int height,
572 GrColorType dataColorType, const GrMipLevel texels[],
573 int mipLevelCount) {
jvanverth900bd4a2016-04-29 13:53:12 -0700574 SkASSERT(!tex->isLinearTiled());
575 // The assumption is either that we have no mipmaps, or that our rect is the entire texture
Robert Phillips590533f2017-07-11 14:22:35 -0400576 SkASSERT(1 == mipLevelCount ||
jvanverth900bd4a2016-04-29 13:53:12 -0700577 (0 == left && 0 == top && width == tex->width() && height == tex->height()));
578
Greg Danieldd20e912017-04-07 14:42:23 -0400579 // We assume that if the texture has mip levels, we either upload to all the levels or just the
580 // first.
Robert Phillips590533f2017-07-11 14:22:35 -0400581 SkASSERT(1 == mipLevelCount || mipLevelCount == (tex->texturePriv().maxMipMapLevel() + 1));
Greg Danieldd20e912017-04-07 14:42:23 -0400582
jvanverth900bd4a2016-04-29 13:53:12 -0700583 if (width == 0 || height == 0) {
584 return false;
585 }
586
Greg Daniel475eb702018-09-28 14:16:50 -0400587 if (GrPixelConfigToColorType(tex->config()) != dataColorType) {
588 return false;
589 }
590
591 // For RGB_888x src data we are uploading it first to an RGBA texture and then copying it to the
592 // dst RGB texture. Thus we do not upload mip levels for that.
593 if (dataColorType == GrColorType::kRGB_888x) {
594 SkASSERT(tex->imageFormat() == VK_FORMAT_R8G8B8_UNORM &&
595 tex->config() == kRGB_888_GrPixelConfig);
596 // First check that we'll be able to do the copy to the to the R8G8B8 image in the end via a
597 // blit or draw.
598 if (!this->vkCaps().configCanBeDstofBlit(kRGB_888_GrPixelConfig, tex->isLinearTiled()) &&
599 !this->vkCaps().maxRenderTargetSampleCount(kRGB_888_GrPixelConfig)) {
600 return false;
601 }
602 mipLevelCount = 1;
603 }
604
Brian Salomond1eaf492017-05-18 10:02:08 -0400605 SkASSERT(this->caps()->isConfigTexturable(tex->config()));
Brian Salomonc320b152018-02-20 14:05:36 -0500606 int bpp = GrColorTypeBytesPerPixel(dataColorType);
jvanverth900bd4a2016-04-29 13:53:12 -0700607
608 // texels is const.
jvanverthc578b0632016-05-02 10:58:12 -0700609 // But we may need to adjust the fPixels ptr based on the copyRect, or fRowBytes.
610 // Because of this we need to make a non-const shallow copy of texels.
Robert Phillips0f992772017-07-12 08:24:56 -0400611 SkAutoTMalloc<GrMipLevel> texelsShallowCopy;
612
Greg Daniel475eb702018-09-28 14:16:50 -0400613 texelsShallowCopy.reset(mipLevelCount);
614 memcpy(texelsShallowCopy.get(), texels, mipLevelCount*sizeof(GrMipLevel));
jvanverth900bd4a2016-04-29 13:53:12 -0700615
Robert Phillips590533f2017-07-11 14:22:35 -0400616 SkTArray<size_t> individualMipOffsets(mipLevelCount);
jvanverthc578b0632016-05-02 10:58:12 -0700617 individualMipOffsets.push_back(0);
618 size_t combinedBufferSize = width * bpp * height;
619 int currentWidth = width;
620 int currentHeight = height;
Greg Daniel475eb702018-09-28 14:16:50 -0400621 if (!texelsShallowCopy[0].fPixels) {
Greg Daniel55afd6d2017-09-29 09:32:44 -0400622 combinedBufferSize = 0;
623 }
624
Greg Daniel468fd632017-03-22 17:03:45 -0400625 // The alignment must be at least 4 bytes and a multiple of the bytes per pixel of the image
626 // config. This works with the assumption that the bytes in pixel config is always a power of 2.
627 SkASSERT((bpp & (bpp - 1)) == 0);
628 const size_t alignmentMask = 0x3 | (bpp - 1);
Robert Phillips590533f2017-07-11 14:22:35 -0400629 for (int currentMipLevel = 1; currentMipLevel < mipLevelCount; currentMipLevel++) {
jvanverthc578b0632016-05-02 10:58:12 -0700630 currentWidth = SkTMax(1, currentWidth/2);
631 currentHeight = SkTMax(1, currentHeight/2);
Greg Daniel660cc992017-06-26 14:55:05 -0400632
Greg Daniel55afd6d2017-09-29 09:32:44 -0400633 if (texelsShallowCopy[currentMipLevel].fPixels) {
634 const size_t trimmedSize = currentWidth * bpp * currentHeight;
635 const size_t alignmentDiff = combinedBufferSize & alignmentMask;
636 if (alignmentDiff != 0) {
637 combinedBufferSize += alignmentMask - alignmentDiff + 1;
638 }
639 individualMipOffsets.push_back(combinedBufferSize);
640 combinedBufferSize += trimmedSize;
641 } else {
642 individualMipOffsets.push_back(0);
Greg Daniel468fd632017-03-22 17:03:45 -0400643 }
Greg Daniel55afd6d2017-09-29 09:32:44 -0400644 }
645 if (0 == combinedBufferSize) {
646 // We don't actually have any data to upload so just return success
647 return true;
jvanverth900bd4a2016-04-29 13:53:12 -0700648 }
649
650 // allocate buffer to hold our mip data
651 GrVkTransferBuffer* transferBuffer =
652 GrVkTransferBuffer::Create(this, combinedBufferSize, GrVkBuffer::kCopyRead_Type);
Greg Daniel475eb702018-09-28 14:16:50 -0400653 if (!transferBuffer) {
Forrest Reilingc04f8452017-04-26 19:26:12 -0700654 return false;
Greg Daniel6888c0d2017-08-25 11:55:50 -0400655 }
jvanverth900bd4a2016-04-29 13:53:12 -0700656
Greg Daniel475eb702018-09-28 14:16:50 -0400657 int uploadLeft = left;
658 int uploadTop = top;
659 GrVkTexture* uploadTexture = tex;
660 // For uploading RGB_888x data to an R8G8B8_UNORM texture we must first upload the data to an
661 // R8G8B8A8_UNORM image and then copy it.
662 sk_sp<GrVkTexture> copyTexture;
663 if (dataColorType == GrColorType::kRGB_888x) {
664 GrSurfaceDesc surfDesc;
665 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
666 surfDesc.fWidth = width;
667 surfDesc.fHeight = height;
668 surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
669 surfDesc.fSampleCnt = 1;
670
671 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
672 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
673 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
674
675 GrVkImage::ImageDesc imageDesc;
676 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
677 imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
678 imageDesc.fWidth = width;
679 imageDesc.fHeight = height;
680 imageDesc.fLevels = 1;
681 imageDesc.fSamples = 1;
682 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
683 imageDesc.fUsageFlags = usageFlags;
684 imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
685
686 copyTexture = GrVkTexture::MakeNewTexture(this, SkBudgeted::kYes, surfDesc, imageDesc,
687 GrMipMapsStatus::kNotAllocated);
688 if (!copyTexture) {
689 return false;
690 }
691 uploadTexture = copyTexture.get();
692 uploadLeft = 0;
693 uploadTop = 0;
694 }
695
jvanverth900bd4a2016-04-29 13:53:12 -0700696 char* buffer = (char*) transferBuffer->map();
Robert Phillips590533f2017-07-11 14:22:35 -0400697 SkTArray<VkBufferImageCopy> regions(mipLevelCount);
jvanverth900bd4a2016-04-29 13:53:12 -0700698
jvanverthc578b0632016-05-02 10:58:12 -0700699 currentWidth = width;
700 currentHeight = height;
Greg Daniel475eb702018-09-28 14:16:50 -0400701 int layerHeight = uploadTexture->height();
Robert Phillips590533f2017-07-11 14:22:35 -0400702 for (int currentMipLevel = 0; currentMipLevel < mipLevelCount; currentMipLevel++) {
Greg Daniel55afd6d2017-09-29 09:32:44 -0400703 if (texelsShallowCopy[currentMipLevel].fPixels) {
704 SkASSERT(1 == mipLevelCount || currentHeight == layerHeight);
705 const size_t trimRowBytes = currentWidth * bpp;
706 const size_t rowBytes = texelsShallowCopy[currentMipLevel].fRowBytes
707 ? texelsShallowCopy[currentMipLevel].fRowBytes
708 : trimRowBytes;
jvanverth900bd4a2016-04-29 13:53:12 -0700709
Greg Daniel55afd6d2017-09-29 09:32:44 -0400710 // copy data into the buffer, skipping the trailing bytes
711 char* dst = buffer + individualMipOffsets[currentMipLevel];
712 const char* src = (const char*)texelsShallowCopy[currentMipLevel].fPixels;
Brian Salomona9b04b92018-06-01 15:04:28 -0400713 SkRectMemcpy(dst, trimRowBytes, src, rowBytes, trimRowBytes, currentHeight);
Greg Daniel55afd6d2017-09-29 09:32:44 -0400714
715 VkBufferImageCopy& region = regions.push_back();
716 memset(&region, 0, sizeof(VkBufferImageCopy));
717 region.bufferOffset = transferBuffer->offset() + individualMipOffsets[currentMipLevel];
718 region.bufferRowLength = currentWidth;
719 region.bufferImageHeight = currentHeight;
720 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, SkToU32(currentMipLevel), 0, 1 };
Greg Daniel475eb702018-09-28 14:16:50 -0400721 region.imageOffset = {uploadLeft, uploadTop, 0};
Greg Daniel55afd6d2017-09-29 09:32:44 -0400722 region.imageExtent = { (uint32_t)currentWidth, (uint32_t)currentHeight, 1 };
jvanverth900bd4a2016-04-29 13:53:12 -0700723 }
jvanverthc578b0632016-05-02 10:58:12 -0700724 currentWidth = SkTMax(1, currentWidth/2);
725 currentHeight = SkTMax(1, currentHeight/2);
Greg Daniela1b282b2017-03-28 14:56:46 -0400726 layerHeight = currentHeight;
jvanverth900bd4a2016-04-29 13:53:12 -0700727 }
728
jvanverth9d54afc2016-09-20 09:20:03 -0700729 // no need to flush non-coherent memory, unmap will do that for us
jvanverth900bd4a2016-04-29 13:53:12 -0700730 transferBuffer->unmap();
731
jvanverth900bd4a2016-04-29 13:53:12 -0700732 // Change layout of our target so it can be copied to
Greg Daniel475eb702018-09-28 14:16:50 -0400733 uploadTexture->setImageLayout(this,
734 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
735 VK_ACCESS_TRANSFER_WRITE_BIT,
736 VK_PIPELINE_STAGE_TRANSFER_BIT,
737 false);
jvanverth900bd4a2016-04-29 13:53:12 -0700738
739 // Copy the buffer to the image
740 fCurrentCmdBuffer->copyBufferToImage(this,
741 transferBuffer,
Greg Daniel475eb702018-09-28 14:16:50 -0400742 uploadTexture,
jvanverth900bd4a2016-04-29 13:53:12 -0700743 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
744 regions.count(),
745 regions.begin());
jvanverth900bd4a2016-04-29 13:53:12 -0700746 transferBuffer->unref();
Greg Daniel475eb702018-09-28 14:16:50 -0400747
748 // If we copied the data into a temporary image first, copy that image into our main texture
749 // now.
750 if (copyTexture.get()) {
751 SkASSERT(dataColorType == GrColorType::kRGB_888x);
752 static const GrSurfaceOrigin kOrigin = kTopLeft_GrSurfaceOrigin;
753 SkAssertResult(this->copySurface(tex, kOrigin, copyTexture.get(), kOrigin,
754 SkIRect::MakeWH(width, height), SkIPoint::Make(left, top),
755 false));
756 }
Robert Phillips590533f2017-07-11 14:22:35 -0400757 if (1 == mipLevelCount) {
Greg Daniel0fc4d2d2017-10-12 11:23:36 -0400758 tex->texturePriv().markMipMapsDirty();
Greg Danieldd20e912017-04-07 14:42:23 -0400759 }
jvanverth900bd4a2016-04-29 13:53:12 -0700760
Greg Daniel164a9f02016-02-22 09:56:40 -0500761 return true;
762}
763
764////////////////////////////////////////////////////////////////////////////////
Robert Phillips67d52cf2017-06-05 13:38:13 -0400765sk_sp<GrTexture> GrVkGpu::onCreateTexture(const GrSurfaceDesc& desc, SkBudgeted budgeted,
Brian Salomon58389b92018-03-07 13:01:25 -0500766 const GrMipLevel texels[], int mipLevelCount) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500767 bool renderTarget = SkToBool(desc.fFlags & kRenderTarget_GrSurfaceFlag);
768
769 VkFormat pixelFormat;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500770 SkAssertResult(GrPixelConfigToVkFormat(desc.fConfig, &pixelFormat));
egdaniel0a3a7f72016-06-24 09:22:31 -0700771
Greg Daniel164a9f02016-02-22 09:56:40 -0500772 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
773 if (renderTarget) {
774 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
775 }
776
777 // For now we will set the VK_IMAGE_USAGE_TRANSFER_DESTINATION_BIT and
778 // VK_IMAGE_USAGE_TRANSFER_SOURCE_BIT on every texture since we do not know whether or not we
779 // will be using this texture in some copy or not. Also this assumes, as is the current case,
jvanverth62340062016-04-26 08:01:44 -0700780 // that all render targets in vulkan are also textures. If we change this practice of setting
Greg Daniel164a9f02016-02-22 09:56:40 -0500781 // both bits, we must make sure to set the destination bit if we are uploading srcData to the
782 // texture.
783 usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
784
Greg Daniel164a9f02016-02-22 09:56:40 -0500785 // This ImageDesc refers to the texture that will be read by the client. Thus even if msaa is
jvanverth62340062016-04-26 08:01:44 -0700786 // requested, this ImageDesc describes the resolved texture. Therefore we always have samples set
Greg Daniel164a9f02016-02-22 09:56:40 -0500787 // to 1.
Robert Phillips590533f2017-07-11 14:22:35 -0400788 int mipLevels = !mipLevelCount ? 1 : mipLevelCount;
Greg Daniel164a9f02016-02-22 09:56:40 -0500789 GrVkImage::ImageDesc imageDesc;
790 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
791 imageDesc.fFormat = pixelFormat;
792 imageDesc.fWidth = desc.fWidth;
793 imageDesc.fHeight = desc.fHeight;
Brian Salomon7128fdd2017-05-22 14:00:07 -0400794 imageDesc.fLevels = mipLevels;
Greg Daniel164a9f02016-02-22 09:56:40 -0500795 imageDesc.fSamples = 1;
Brian Salomon7128fdd2017-05-22 14:00:07 -0400796 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
Greg Daniel164a9f02016-02-22 09:56:40 -0500797 imageDesc.fUsageFlags = usageFlags;
Brian Salomon7128fdd2017-05-22 14:00:07 -0400798 imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
Greg Daniel164a9f02016-02-22 09:56:40 -0500799
Greg Daniel0fc4d2d2017-10-12 11:23:36 -0400800 GrMipMapsStatus mipMapsStatus = GrMipMapsStatus::kNotAllocated;
801 if (mipLevels > 1) {
802 mipMapsStatus = GrMipMapsStatus::kValid;
803 for (int i = 0; i < mipLevels; ++i) {
804 if (!texels[i].fPixels) {
805 mipMapsStatus = GrMipMapsStatus::kDirty;
806 break;
807 }
Greg Daniel834f1202017-10-09 15:06:20 -0400808 }
809 }
810
Robert Phillips67d52cf2017-06-05 13:38:13 -0400811 sk_sp<GrVkTexture> tex;
Greg Daniel164a9f02016-02-22 09:56:40 -0500812 if (renderTarget) {
Greg Daniel475eb702018-09-28 14:16:50 -0400813 tex = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(this, budgeted, desc,
814 imageDesc,
815 mipMapsStatus);
Greg Daniel164a9f02016-02-22 09:56:40 -0500816 } else {
Greg Daniel475eb702018-09-28 14:16:50 -0400817 tex = GrVkTexture::MakeNewTexture(this, budgeted, desc, imageDesc, mipMapsStatus);
Greg Daniel164a9f02016-02-22 09:56:40 -0500818 }
819
820 if (!tex) {
821 return nullptr;
822 }
823
Brian Salomonc320b152018-02-20 14:05:36 -0500824 auto colorType = GrPixelConfigToColorType(desc.fConfig);
Robert Phillips590533f2017-07-11 14:22:35 -0400825 if (mipLevelCount) {
Brian Salomona9b04b92018-06-01 15:04:28 -0400826 if (!this->uploadTexDataOptimal(tex.get(), 0, 0, desc.fWidth, desc.fHeight, colorType,
827 texels, mipLevelCount)) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500828 tex->unref();
829 return nullptr;
830 }
831 }
832
Brian Salomond17b4a62017-05-23 16:53:47 -0400833 if (desc.fFlags & kPerformInitialClear_GrSurfaceFlag) {
834 VkClearColorValue zeroClearColor;
835 memset(&zeroClearColor, 0, sizeof(zeroClearColor));
836 VkImageSubresourceRange range;
837 range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
838 range.baseArrayLayer = 0;
839 range.baseMipLevel = 0;
840 range.layerCount = 1;
841 range.levelCount = 1;
842 tex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
843 VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, false);
Robert Phillips67d52cf2017-06-05 13:38:13 -0400844 this->currentCommandBuffer()->clearColorImage(this, tex.get(), &zeroClearColor, 1, &range);
Brian Salomond17b4a62017-05-23 16:53:47 -0400845 }
Ben Wagnerff134f22018-04-24 16:29:16 -0400846 return std::move(tex);
Greg Daniel164a9f02016-02-22 09:56:40 -0500847}
848
849////////////////////////////////////////////////////////////////////////////////
850
Greg Daniel6888c0d2017-08-25 11:55:50 -0400851void GrVkGpu::copyBuffer(GrVkBuffer* srcBuffer, GrVkBuffer* dstBuffer, VkDeviceSize srcOffset,
852 VkDeviceSize dstOffset, VkDeviceSize size) {
853 VkBufferCopy copyRegion;
854 copyRegion.srcOffset = srcOffset;
855 copyRegion.dstOffset = dstOffset;
856 copyRegion.size = size;
857 fCurrentCmdBuffer->copyBuffer(this, srcBuffer, dstBuffer, 1, &copyRegion);
858}
859
jvanverthdb379092016-07-07 11:18:46 -0700860bool GrVkGpu::updateBuffer(GrVkBuffer* buffer, const void* src,
861 VkDeviceSize offset, VkDeviceSize size) {
jvanvertha584de92016-06-30 09:10:52 -0700862 // Update the buffer
jvanverthdb379092016-07-07 11:18:46 -0700863 fCurrentCmdBuffer->updateBuffer(this, buffer, offset, size, src);
jvanvertha584de92016-06-30 09:10:52 -0700864
865 return true;
866}
867
868////////////////////////////////////////////////////////////////////////////////
869
Greg Daniel5254ccc2017-11-13 11:05:52 -0500870static bool check_backend_texture(const GrBackendTexture& backendTex,
871 GrPixelConfig config) {
Greg Daniel52e16d92018-04-10 09:34:07 -0400872 GrVkImageInfo info;
873 if (!backendTex.getVkImageInfo(&info)) {
Brian Salomond17f6582017-07-19 18:28:58 -0400874 return false;
Greg Daniel164a9f02016-02-22 09:56:40 -0500875 }
876
Greg Daniel52e16d92018-04-10 09:34:07 -0400877 if (VK_NULL_HANDLE == info.fImage || VK_NULL_HANDLE == info.fAlloc.fMemory) {
Brian Salomond17f6582017-07-19 18:28:58 -0400878 return false;
jvanverthfd359ca2016-03-18 11:57:24 -0700879 }
Greg Daniel7ef28f32017-04-20 16:41:55 +0000880
Greg Daniel52e16d92018-04-10 09:34:07 -0400881 SkASSERT(GrVkFormatPixelConfigPairIsValid(info.fFormat, config));
Brian Salomond17f6582017-07-19 18:28:58 -0400882 return true;
883}
884
// Wraps a client-created Vulkan image in a non-renderable GrTexture (flags are
// kNone_GrSurfaceFlags, sample count 1). The GrVkImageLayout object is taken from the
// GrBackendTexture so layout changes made by Skia remain visible to the client.
// Returns nullptr if the backend texture does not carry valid Vulkan image state.
sk_sp<GrTexture> GrVkGpu::onWrapBackendTexture(const GrBackendTexture& backendTex,
                                               GrWrapOwnership ownership) {
    if (!check_backend_texture(backendTex, backendTex.config())) {
        return nullptr;
    }

    GrSurfaceDesc surfDesc;
    surfDesc.fFlags = kNone_GrSurfaceFlags;
    surfDesc.fWidth = backendTex.width();
    surfDesc.fHeight = backendTex.height();
    surfDesc.fConfig = backendTex.config();
    surfDesc.fSampleCnt = 1;

    GrVkImageInfo imageInfo;
    if (!backendTex.getVkImageInfo(&imageInfo)) {
        return nullptr;
    }
    sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
    SkASSERT(layout);
    return GrVkTexture::MakeWrappedTexture(this, surfDesc, ownership, imageInfo, std::move(layout));
}
906
907sk_sp<GrTexture> GrVkGpu::onWrapRenderableBackendTexture(const GrBackendTexture& backendTex,
Brian Salomond17f6582017-07-19 18:28:58 -0400908 int sampleCnt,
909 GrWrapOwnership ownership) {
Greg Daniel5254ccc2017-11-13 11:05:52 -0500910 if (!check_backend_texture(backendTex, backendTex.config())) {
Brian Salomond17f6582017-07-19 18:28:58 -0400911 return nullptr;
Greg Daniel164a9f02016-02-22 09:56:40 -0500912 }
Brian Salomond17f6582017-07-19 18:28:58 -0400913
914 GrSurfaceDesc surfDesc;
915 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
916 surfDesc.fWidth = backendTex.width();
917 surfDesc.fHeight = backendTex.height();
918 surfDesc.fConfig = backendTex.config();
Brian Salomonbdecacf2018-02-02 20:32:49 -0500919 surfDesc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, backendTex.config());
Brian Salomond17f6582017-07-19 18:28:58 -0400920
Greg Daniel52e16d92018-04-10 09:34:07 -0400921 GrVkImageInfo imageInfo;
922 if (!backendTex.getVkImageInfo(&imageInfo)) {
923 return nullptr;
924 }
925 sk_sp<GrVkImageLayout> layout = backendTex.getGrVkImageLayout();
926 SkASSERT(layout);
927
Brian Salomond17f6582017-07-19 18:28:58 -0400928 return GrVkTextureRenderTarget::MakeWrappedTextureRenderTarget(this, surfDesc, ownership,
Greg Daniel52e16d92018-04-10 09:34:07 -0400929 imageInfo, std::move(layout));
Greg Daniel164a9f02016-02-22 09:56:40 -0500930}
931
// Wraps a client-created Vulkan image (typically a swapchain image) as a GrRenderTarget.
// Only single-sample backend render targets are accepted; see the comment below for why.
// Returns nullptr if the backend object is multisampled or lacks valid image state.
sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& backendRT){
    // Currently the Vulkan backend does not support wrapping of msaa render targets directly. In
    // general this is not an issue since swapchain images in vulkan are never multisampled. Thus if
    // you want a multisampled RT it is best to wrap the swapchain images and then let Skia handle
    // creating and owning the MSAA images.
    if (backendRT.sampleCnt() > 1) {
        return nullptr;
    }

    GrVkImageInfo info;
    if (!backendRT.getVkImageInfo(&info)) {
        return nullptr;
    }

    if (VK_NULL_HANDLE == info.fImage) {
        return nullptr;
    }

    GrSurfaceDesc desc;
    desc.fFlags = kRenderTarget_GrSurfaceFlag;
    desc.fWidth = backendRT.width();
    desc.fHeight = backendRT.height();
    desc.fConfig = backendRT.config();
    desc.fSampleCnt = 1;

    // Share the layout object with the client so layout transitions stay in sync.
    sk_sp<GrVkImageLayout> layout = backendRT.getGrVkImageLayout();

    sk_sp<GrVkRenderTarget> tgt = GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, info,
                                                                            std::move(layout));

    // We don't allow the client to supply a premade stencil buffer. We always create one if needed.
    SkASSERT(!backendRT.stencilBits());
    if (tgt) {
        SkASSERT(tgt->canAttemptStencilAttachment());
    }

    return std::move(tgt);
}
970
// Wraps a client-created Vulkan texture purely as a GrRenderTarget (no GrTexture
// interface). The sample count is resolved against the caps; a result of 0 means the
// config is not renderable at the requested count and the wrap fails. Returns nullptr
// on any invalid input.
sk_sp<GrRenderTarget> GrVkGpu::onWrapBackendTextureAsRenderTarget(const GrBackendTexture& tex,
                                                                  int sampleCnt) {

    GrVkImageInfo imageInfo;
    if (!tex.getVkImageInfo(&imageInfo)) {
        return nullptr;
    }
    if (VK_NULL_HANDLE == imageInfo.fImage) {
        return nullptr;
    }

    GrSurfaceDesc desc;
    desc.fFlags = kRenderTarget_GrSurfaceFlag;
    desc.fWidth = tex.width();
    desc.fHeight = tex.height();
    desc.fConfig = tex.config();
    desc.fSampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, tex.config());
    // 0 indicates the config/sample-count combination is not renderable.
    if (!desc.fSampleCnt) {
        return nullptr;
    }

    sk_sp<GrVkImageLayout> layout = tex.getGrVkImageLayout();
    SkASSERT(layout);

    return GrVkRenderTarget::MakeWrappedRenderTarget(this, desc, imageInfo, std::move(layout));
}
997
// Regenerates all mip levels of 'tex' on the GPU by successively blitting each level into
// the next smaller one with linear filtering. Requires optimal tiling, blit support for
// the config in both directions, and mipmap support. After each blit, a barrier
// transitions the just-written level to TRANSFER_SRC so it can serve as the source of the
// next blit; at the end every level (including the last) is left in
// VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL. Returns false when regeneration is unsupported.
bool GrVkGpu::onRegenerateMipMapLevels(GrTexture* tex) {
    auto* vkTex = static_cast<GrVkTexture*>(tex);
    // don't do anything for linearly tiled textures (can't have mipmaps)
    if (vkTex->isLinearTiled()) {
        SkDebugf("Trying to create mipmap for linear tiled texture");
        return false;
    }

    // determine if we can blit to and from this format
    const GrVkCaps& caps = this->vkCaps();
    if (!caps.configCanBeDstofBlit(tex->config(), false) ||
        !caps.configCanBeSrcofBlit(tex->config(), false) ||
        !caps.mipMapSupport()) {
        return false;
    }

    // Some devices require pending commands to be submitted before copy operations;
    // kSkip_SyncQueue avoids waiting for the GPU to finish.
    if (this->vkCaps().mustSubmitCommandsBeforeCopyOp()) {
        this->submitCommandBuffer(kSkip_SyncQueue);
    }

    int width = tex->width();
    int height = tex->height();
    VkImageBlit blitRegion;
    memset(&blitRegion, 0, sizeof(VkImageBlit));

    // SkMipMap doesn't include the base level in the level count so we have to add 1
    uint32_t levelCount = SkMipMap::ComputeLevelCount(tex->width(), tex->height()) + 1;
    SkASSERT(levelCount == vkTex->mipLevels());

    // change layout of the layers so we can write to them.
    vkTex->setImageLayout(this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_ACCESS_TRANSFER_WRITE_BIT,
                          VK_PIPELINE_STAGE_TRANSFER_BIT, false);

    // setup memory barrier
    // Reused for each level below; only subresourceRange.baseMipLevel changes per level.
    SkASSERT(GrVkFormatIsSupported(vkTex->imageFormat()));
    VkImageAspectFlags aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
    VkImageMemoryBarrier imageMemoryBarrier = {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,  // sType
            nullptr,                                 // pNext
            VK_ACCESS_TRANSFER_WRITE_BIT,            // srcAccessMask
            VK_ACCESS_TRANSFER_READ_BIT,             // dstAccessMask
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,    // oldLayout
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,    // newLayout
            VK_QUEUE_FAMILY_IGNORED,                 // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                 // dstQueueFamilyIndex
            vkTex->image(),                          // image
            {aspectFlags, 0, 1, 0, 1}                // subresourceRange
    };

    // Blit the miplevels
    uint32_t mipLevel = 1;
    while (mipLevel < levelCount) {
        int prevWidth = width;
        int prevHeight = height;
        width = SkTMax(1, width / 2);
        height = SkTMax(1, height / 2);

        // Make the just-written source level readable before blitting from it.
        imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
        this->addImageMemoryBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    false, &imageMemoryBarrier);

        blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel - 1, 0, 1 };
        blitRegion.srcOffsets[0] = { 0, 0, 0 };
        blitRegion.srcOffsets[1] = { prevWidth, prevHeight, 1 };
        blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, mipLevel, 0, 1 };
        blitRegion.dstOffsets[0] = { 0, 0, 0 };
        blitRegion.dstOffsets[1] = { width, height, 1 };
        fCurrentCmdBuffer->blitImage(this,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     vkTex->resource(),
                                     vkTex->image(),
                                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                     1,
                                     &blitRegion,
                                     VK_FILTER_LINEAR);
        ++mipLevel;
    }
    // This barrier logically is not needed, but it changes the final level to the same layout as
    // all the others, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL. This makes tracking of the layouts and
    // future layout changes easier.
    imageMemoryBarrier.subresourceRange.baseMipLevel = mipLevel - 1;
    this->addImageMemoryBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                false, &imageMemoryBarrier);
    vkTex->updateImageLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    return true;
}
1086
Greg Daniel164a9f02016-02-22 09:56:40 -05001087////////////////////////////////////////////////////////////////////////////////
1088
1089GrStencilAttachment* GrVkGpu::createStencilAttachmentForRenderTarget(const GrRenderTarget* rt,
1090 int width,
1091 int height) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001092 SkASSERT(width >= rt->width());
1093 SkASSERT(height >= rt->height());
1094
1095 int samples = rt->numStencilSamples();
1096
Ethan Nicholasf610bae2018-09-20 16:55:21 -04001097 const GrVkCaps::StencilFormat& sFmt = this->vkCaps().preferredStencilFormat();
Greg Daniel164a9f02016-02-22 09:56:40 -05001098
1099 GrVkStencilAttachment* stencil(GrVkStencilAttachment::Create(this,
Greg Daniel164a9f02016-02-22 09:56:40 -05001100 width,
1101 height,
1102 samples,
1103 sFmt));
1104 fStats.incStencilAttachmentCreates();
1105 return stencil;
1106}
1107
1108////////////////////////////////////////////////////////////////////////////////
1109
// Test-only helper: fills 'h' rows of a mapped Vulkan allocation, starting at
// 'bufferOffset'. When srcData is non-null, copies trimRowBytes per row, reading at
// srcRowBytes stride and writing at dstRowBytes stride; when srcData is null, zero-fills
// the whole dstRowBytes*h region so the texture has defined contents. Flushes and unmaps
// the allocation before returning. Returns false only if the memory could not be mapped.
bool copy_testing_data(GrVkGpu* gpu, const void* srcData, const GrVkAlloc& alloc,
                       size_t bufferOffset, size_t srcRowBytes, size_t dstRowBytes,
                       size_t trimRowBytes, int h) {
    VkDeviceSize size = dstRowBytes * h;
    VkDeviceSize offset = bufferOffset;
    SkASSERT(size + offset <= alloc.fSize);
    void* mapPtr = GrVkMemory::MapAlloc(gpu, alloc);
    if (!mapPtr) {
        return false;
    }
    // Advance to the requested offset within the mapped allocation.
    mapPtr = reinterpret_cast<char*>(mapPtr) + offset;

    if (srcData) {
        // If there is no padding on dst we can do a single memcopy.
        // This assumes the srcData comes in with no padding.
        SkRectMemcpy(mapPtr, dstRowBytes, srcData, srcRowBytes, trimRowBytes, h);
    } else {
        // If there is no srcdata we always copy 0's into the textures so that it is initialized
        // with some data.
        memset(mapPtr, 0, dstRowBytes * h);
    }
    // Flush before unmapping so the writes are visible to the device even when the
    // allocation is non-coherent.
    GrVkMemory::FlushMappedAlloc(gpu, alloc, offset, size);
    GrVkMemory::UnmapAlloc(gpu, alloc);
    return true;
}
1135
Brian Salomonf865b052018-03-09 09:01:53 -05001136#if GR_TEST_UTILS
Brian Salomon52e943a2018-03-13 09:32:39 -04001137bool GrVkGpu::createTestingOnlyVkImage(GrPixelConfig config, int w, int h, bool texturable,
1138 bool renderable, GrMipMapped mipMapped, const void* srcData,
Robert Phillips646f6372018-09-25 09:31:10 -04001139 size_t srcRowBytes, GrVkImageInfo* info) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001140 SkASSERT(texturable || renderable);
1141 if (!texturable) {
1142 SkASSERT(GrMipMapped::kNo == mipMapped);
1143 SkASSERT(!srcData);
1144 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001145 VkFormat pixelFormat;
1146 if (!GrPixelConfigToVkFormat(config, &pixelFormat)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001147 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001148 }
1149
Brian Salomon52e943a2018-03-13 09:32:39 -04001150 if (texturable && !fVkCaps->isConfigTexturable(config)) {
1151 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001152 }
1153
Brian Salomon52e943a2018-03-13 09:32:39 -04001154 if (renderable && !fVkCaps->isConfigRenderable(config)) {
1155 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001156 }
1157
1158 // Currently we don't support uploading pixel data when mipped.
1159 if (srcData && GrMipMapped::kYes == mipMapped) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001160 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001161 }
1162
Brian Salomon52e943a2018-03-13 09:32:39 -04001163 VkImageUsageFlags usageFlags = 0;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001164 usageFlags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1165 usageFlags |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
Brian Salomon52e943a2018-03-13 09:32:39 -04001166 if (texturable) {
1167 usageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
1168 }
1169 if (renderable) {
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001170 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1171 }
1172
1173 VkImage image = VK_NULL_HANDLE;
Greg Daniel8385a8a2018-02-26 13:29:37 -05001174 GrVkAlloc alloc;
Brian Salomonde9f5462018-03-07 14:23:58 -05001175 VkImageLayout initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001176
1177 // Create Image
1178 VkSampleCountFlagBits vkSamples;
1179 if (!GrSampleCountToVkSampleCount(1, &vkSamples)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001180 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001181 }
1182
1183 // Figure out the number of mip levels.
1184 uint32_t mipLevels = 1;
1185 if (GrMipMapped::kYes == mipMapped) {
1186 mipLevels = SkMipMap::ComputeLevelCount(w, h) + 1;
1187 }
1188
1189 const VkImageCreateInfo imageCreateInfo = {
Brian Salomonde9f5462018-03-07 14:23:58 -05001190 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
1191 nullptr, // pNext
Brian Osman2b23c4b2018-06-01 12:25:08 -04001192 0, // VkImageCreateFlags
Brian Salomonde9f5462018-03-07 14:23:58 -05001193 VK_IMAGE_TYPE_2D, // VkImageType
1194 pixelFormat, // VkFormat
1195 {(uint32_t)w, (uint32_t)h, 1}, // VkExtent3D
1196 mipLevels, // mipLevels
1197 1, // arrayLayers
1198 vkSamples, // samples
1199 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling
1200 usageFlags, // VkImageUsageFlags
1201 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
1202 0, // queueFamilyCount
1203 0, // pQueueFamilyIndices
1204 initialLayout // initialLayout
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001205 };
1206
Brian Salomon52e943a2018-03-13 09:32:39 -04001207 GR_VK_CALL_ERRCHECK(this->vkInterface(),
1208 CreateImage(this->device(), &imageCreateInfo, nullptr, &image));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001209
Brian Salomonde9f5462018-03-07 14:23:58 -05001210 if (!GrVkMemory::AllocAndBindImageMemory(this, image, false, &alloc)) {
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001211 VK_CALL(DestroyImage(this->device(), image, nullptr));
Brian Salomon52e943a2018-03-13 09:32:39 -04001212 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001213 }
1214
1215 // We need to declare these early so that we can delete them at the end outside of the if block.
Greg Daniel8385a8a2018-02-26 13:29:37 -05001216 GrVkAlloc bufferAlloc;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001217 VkBuffer buffer = VK_NULL_HANDLE;
1218
1219 VkResult err;
1220 const VkCommandBufferAllocateInfo cmdInfo = {
1221 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
1222 nullptr, // pNext
1223 fCmdPool, // commandPool
1224 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
1225 1 // bufferCount
1226 };
1227
1228 VkCommandBuffer cmdBuffer;
1229 err = VK_CALL(AllocateCommandBuffers(fDevice, &cmdInfo, &cmdBuffer));
1230 if (err) {
1231 GrVkMemory::FreeImageMemory(this, false, alloc);
1232 VK_CALL(DestroyImage(fDevice, image, nullptr));
Brian Salomon52e943a2018-03-13 09:32:39 -04001233 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001234 }
1235
1236 VkCommandBufferBeginInfo cmdBufferBeginInfo;
1237 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
1238 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1239 cmdBufferBeginInfo.pNext = nullptr;
1240 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
1241 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
1242
1243 err = VK_CALL(BeginCommandBuffer(cmdBuffer, &cmdBufferBeginInfo));
1244 SkASSERT(!err);
1245
1246 size_t bpp = GrBytesPerPixel(config);
Brian Salomonde9f5462018-03-07 14:23:58 -05001247 SkASSERT(w && h);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001248
Robert Phillips646f6372018-09-25 09:31:10 -04001249 const size_t trimRowBytes = w * bpp;
1250 if (!srcRowBytes) {
1251 srcRowBytes = trimRowBytes;
1252 }
1253
Brian Salomonde9f5462018-03-07 14:23:58 -05001254 SkTArray<size_t> individualMipOffsets(mipLevels);
1255 individualMipOffsets.push_back(0);
1256 size_t combinedBufferSize = w * bpp * h;
1257 int currentWidth = w;
1258 int currentHeight = h;
1259 // The alignment must be at least 4 bytes and a multiple of the bytes per pixel of the image
1260 // config. This works with the assumption that the bytes in pixel config is always a power
1261 // of 2.
1262 SkASSERT((bpp & (bpp - 1)) == 0);
1263 const size_t alignmentMask = 0x3 | (bpp - 1);
1264 for (uint32_t currentMipLevel = 1; currentMipLevel < mipLevels; currentMipLevel++) {
1265 currentWidth = SkTMax(1, currentWidth / 2);
1266 currentHeight = SkTMax(1, currentHeight / 2);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001267
Brian Salomonde9f5462018-03-07 14:23:58 -05001268 const size_t trimmedSize = currentWidth * bpp * currentHeight;
1269 const size_t alignmentDiff = combinedBufferSize & alignmentMask;
1270 if (alignmentDiff != 0) {
1271 combinedBufferSize += alignmentMask - alignmentDiff + 1;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001272 }
Brian Salomonde9f5462018-03-07 14:23:58 -05001273 individualMipOffsets.push_back(combinedBufferSize);
1274 combinedBufferSize += trimmedSize;
1275 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001276
Brian Salomonde9f5462018-03-07 14:23:58 -05001277 VkBufferCreateInfo bufInfo;
1278 memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
1279 bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1280 bufInfo.flags = 0;
1281 bufInfo.size = combinedBufferSize;
1282 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1283 bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1284 bufInfo.queueFamilyIndexCount = 0;
1285 bufInfo.pQueueFamilyIndices = nullptr;
1286 err = VK_CALL(CreateBuffer(fDevice, &bufInfo, nullptr, &buffer));
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001287
Brian Salomonde9f5462018-03-07 14:23:58 -05001288 if (err) {
1289 GrVkMemory::FreeImageMemory(this, false, alloc);
1290 VK_CALL(DestroyImage(fDevice, image, nullptr));
1291 VK_CALL(EndCommandBuffer(cmdBuffer));
1292 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool, 1, &cmdBuffer));
Brian Salomon52e943a2018-03-13 09:32:39 -04001293 return false;
Brian Salomonde9f5462018-03-07 14:23:58 -05001294 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001295
Brian Salomonde9f5462018-03-07 14:23:58 -05001296 if (!GrVkMemory::AllocAndBindBufferMemory(this, buffer, GrVkBuffer::kCopyRead_Type, true,
1297 &bufferAlloc)) {
1298 GrVkMemory::FreeImageMemory(this, false, alloc);
1299 VK_CALL(DestroyImage(fDevice, image, nullptr));
1300 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1301 VK_CALL(EndCommandBuffer(cmdBuffer));
1302 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool, 1, &cmdBuffer));
Brian Salomon52e943a2018-03-13 09:32:39 -04001303 return false;
Brian Salomonde9f5462018-03-07 14:23:58 -05001304 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001305
Brian Salomonde9f5462018-03-07 14:23:58 -05001306 currentWidth = w;
1307 currentHeight = h;
1308 for (uint32_t currentMipLevel = 0; currentMipLevel < mipLevels; currentMipLevel++) {
1309 SkASSERT(0 == currentMipLevel || !srcData);
1310 size_t currentRowBytes = bpp * currentWidth;
1311 size_t bufferOffset = individualMipOffsets[currentMipLevel];
Robert Phillips646f6372018-09-25 09:31:10 -04001312 if (!copy_testing_data(this, srcData, bufferAlloc, bufferOffset, srcRowBytes,
1313 currentRowBytes, trimRowBytes, currentHeight)) {
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001314 GrVkMemory::FreeImageMemory(this, false, alloc);
1315 VK_CALL(DestroyImage(fDevice, image, nullptr));
Brian Salomonde9f5462018-03-07 14:23:58 -05001316 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001317 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1318 VK_CALL(EndCommandBuffer(cmdBuffer));
1319 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool, 1, &cmdBuffer));
Brian Salomon52e943a2018-03-13 09:32:39 -04001320 return false;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001321 }
Brian Salomonde9f5462018-03-07 14:23:58 -05001322 currentWidth = SkTMax(1, currentWidth / 2);
1323 currentHeight = SkTMax(1, currentHeight / 2);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001324 }
Brian Salomonde9f5462018-03-07 14:23:58 -05001325
1326 // Set image layout and add barrier
1327 VkImageMemoryBarrier barrier;
1328 memset(&barrier, 0, sizeof(VkImageMemoryBarrier));
1329 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
1330 barrier.pNext = nullptr;
Greg Daniel6ddbafc2018-05-24 12:34:29 -04001331 barrier.srcAccessMask = GrVkImage::LayoutToSrcAccessMask(initialLayout);
Brian Salomonde9f5462018-03-07 14:23:58 -05001332 barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1333 barrier.oldLayout = initialLayout;
1334 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1335 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1336 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1337 barrier.image = image;
1338 barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mipLevels, 0, 1};
1339
Greg Daniel6ddbafc2018-05-24 12:34:29 -04001340 VK_CALL(CmdPipelineBarrier(cmdBuffer, GrVkImage::LayoutToPipelineStageFlags(initialLayout),
Brian Salomonde9f5462018-03-07 14:23:58 -05001341 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
1342 &barrier));
1343 initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1344
1345 SkTArray<VkBufferImageCopy> regions(mipLevels);
1346
1347 currentWidth = w;
1348 currentHeight = h;
1349 for (uint32_t currentMipLevel = 0; currentMipLevel < mipLevels; currentMipLevel++) {
1350 // Submit copy command
1351 VkBufferImageCopy& region = regions.push_back();
1352 memset(&region, 0, sizeof(VkBufferImageCopy));
1353 region.bufferOffset = individualMipOffsets[currentMipLevel];
1354 region.bufferRowLength = currentWidth;
1355 region.bufferImageHeight = currentHeight;
1356 region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1357 region.imageOffset = {0, 0, 0};
1358 region.imageExtent = {(uint32_t)currentWidth, (uint32_t)currentHeight, 1};
1359 currentWidth = SkTMax(1, currentWidth / 2);
1360 currentHeight = SkTMax(1, currentHeight / 2);
1361 }
1362
1363 VK_CALL(CmdCopyBufferToImage(cmdBuffer, buffer, image, initialLayout, regions.count(),
1364 regions.begin()));
1365
Brian Salomon52e943a2018-03-13 09:32:39 -04001366 if (texturable) {
1367 // Change Image layout to shader read since if we use this texture as a borrowed textures
1368 // within Ganesh we require that its layout be set to that
1369 memset(&barrier, 0, sizeof(VkImageMemoryBarrier));
1370 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
1371 barrier.pNext = nullptr;
Greg Daniel6ddbafc2018-05-24 12:34:29 -04001372 barrier.srcAccessMask = GrVkImage::LayoutToSrcAccessMask(initialLayout);
Brian Salomon52e943a2018-03-13 09:32:39 -04001373 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
1374 barrier.oldLayout = initialLayout;
1375 barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1376 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1377 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1378 barrier.image = image;
1379 barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mipLevels, 0, 1};
Brian Salomon52e943a2018-03-13 09:32:39 -04001380 VK_CALL(CmdPipelineBarrier(cmdBuffer,
Greg Daniel6ddbafc2018-05-24 12:34:29 -04001381 GrVkImage::LayoutToPipelineStageFlags(initialLayout),
Brian Salomon52e943a2018-03-13 09:32:39 -04001382 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
1383 0,
1384 0, nullptr,
1385 0, nullptr,
1386 1, &barrier));
Greg Daniel4f4a53f2018-03-15 10:20:45 -04001387 initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
Brian Salomon52e943a2018-03-13 09:32:39 -04001388 }
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001389
1390 // End CommandBuffer
1391 err = VK_CALL(EndCommandBuffer(cmdBuffer));
1392 SkASSERT(!err);
1393
1394 // Create Fence for queue
1395 VkFence fence;
1396 VkFenceCreateInfo fenceInfo;
1397 memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
1398 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1399
1400 err = VK_CALL(CreateFence(fDevice, &fenceInfo, nullptr, &fence));
1401 SkASSERT(!err);
1402
1403 VkSubmitInfo submitInfo;
1404 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
1405 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1406 submitInfo.pNext = nullptr;
1407 submitInfo.waitSemaphoreCount = 0;
1408 submitInfo.pWaitSemaphores = nullptr;
1409 submitInfo.pWaitDstStageMask = 0;
1410 submitInfo.commandBufferCount = 1;
1411 submitInfo.pCommandBuffers = &cmdBuffer;
1412 submitInfo.signalSemaphoreCount = 0;
1413 submitInfo.pSignalSemaphores = nullptr;
1414 err = VK_CALL(QueueSubmit(this->queue(), 1, &submitInfo, fence));
1415 SkASSERT(!err);
1416
1417 err = VK_CALL(WaitForFences(fDevice, 1, &fence, true, UINT64_MAX));
1418 if (VK_TIMEOUT == err) {
1419 GrVkMemory::FreeImageMemory(this, false, alloc);
1420 VK_CALL(DestroyImage(fDevice, image, nullptr));
1421 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
1422 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1423 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool, 1, &cmdBuffer));
1424 VK_CALL(DestroyFence(fDevice, fence, nullptr));
1425 SkDebugf("Fence failed to signal: %d\n", err);
1426 SK_ABORT("failing");
1427 }
1428 SkASSERT(!err);
1429
1430 // Clean up transfer resources
1431 if (buffer != VK_NULL_HANDLE) { // workaround for an older NVidia driver crash
1432 GrVkMemory::FreeBufferMemory(this, GrVkBuffer::kCopyRead_Type, bufferAlloc);
1433 VK_CALL(DestroyBuffer(fDevice, buffer, nullptr));
1434 }
1435 VK_CALL(FreeCommandBuffers(fDevice, fCmdPool, 1, &cmdBuffer));
1436 VK_CALL(DestroyFence(fDevice, fence, nullptr));
1437
Brian Salomon52e943a2018-03-13 09:32:39 -04001438 info->fImage = image;
1439 info->fAlloc = alloc;
1440 info->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
1441 info->fImageLayout = initialLayout;
1442 info->fFormat = pixelFormat;
1443 info->fLevelCount = mipLevels;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001444
Brian Salomon52e943a2018-03-13 09:32:39 -04001445 return true;
1446}
1447
1448GrBackendTexture GrVkGpu::createTestingOnlyBackendTexture(const void* srcData, int w, int h,
Robert Phillips646f6372018-09-25 09:31:10 -04001449 GrColorType colorType,
1450 bool isRenderTarget,
1451 GrMipMapped mipMapped, size_t rowBytes) {
Brian Salomon8a375832018-03-14 10:21:40 -04001452 this->handleDirtyContext();
Robert Phillipsa479f962018-04-10 11:45:40 -04001453
1454 if (w > this->caps()->maxTextureSize() || h > this->caps()->maxTextureSize()) {
1455 return GrBackendTexture();
1456 }
1457
Robert Phillips646f6372018-09-25 09:31:10 -04001458 GrPixelConfig config = GrColorTypeToPixelConfig(colorType, GrSRGBEncoded::kNo);
1459 if (!this->caps()->isConfigTexturable(config)) {
1460 return GrBackendTexture();
1461 }
1462
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001463 GrVkImageInfo info;
Brian Salomon52e943a2018-03-13 09:32:39 -04001464 if (!this->createTestingOnlyVkImage(config, w, h, true, isRenderTarget, mipMapped, srcData,
Robert Phillips646f6372018-09-25 09:31:10 -04001465 rowBytes, &info)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001466 return {};
1467 }
Greg Daniel108bb232018-07-03 16:18:29 -04001468 GrBackendTexture beTex = GrBackendTexture(w, h, info);
1469 // Lots of tests don't go through Skia's public interface which will set the config so for
1470 // testing we make sure we set a config here.
1471 beTex.setPixelConfig(config);
1472 return beTex;
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001473}
1474
1475bool GrVkGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
1476 SkASSERT(kVulkan_GrBackend == tex.fBackend);
1477
Greg Daniel52e16d92018-04-10 09:34:07 -04001478 GrVkImageInfo backend;
1479 if (!tex.getVkImageInfo(&backend)) {
1480 return false;
1481 }
Greg Daniel164a9f02016-02-22 09:56:40 -05001482
Greg Daniel52e16d92018-04-10 09:34:07 -04001483 if (backend.fImage && backend.fAlloc.fMemory) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001484 VkMemoryRequirements req;
1485 memset(&req, 0, sizeof(req));
1486 GR_VK_CALL(this->vkInterface(), GetImageMemoryRequirements(fDevice,
Greg Daniel52e16d92018-04-10 09:34:07 -04001487 backend.fImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001488 &req));
1489 // TODO: find a better check
1490 // This will probably fail with a different driver
1491 return (req.size > 0) && (req.size <= 8192 * 8192);
1492 }
1493
1494 return false;
1495}
1496
Brian Salomon26102cb2018-03-09 09:33:19 -05001497void GrVkGpu::deleteTestingOnlyBackendTexture(const GrBackendTexture& tex) {
1498 SkASSERT(kVulkan_GrBackend == tex.fBackend);
Robert Phillipsd21b2a52017-12-12 13:01:25 -05001499
Greg Daniel52e16d92018-04-10 09:34:07 -04001500 GrVkImageInfo info;
1501 if (tex.getVkImageInfo(&info)) {
Greg Daniel52e16d92018-04-10 09:34:07 -04001502 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
Greg Daniel164a9f02016-02-22 09:56:40 -05001503 }
1504}
1505
Brian Osman2d010b62018-08-09 10:55:09 -04001506GrBackendRenderTarget GrVkGpu::createTestingOnlyBackendRenderTarget(int w, int h, GrColorType ct) {
Greg Daniel92cbf3f2018-04-12 16:50:17 -04001507 if (w > this->caps()->maxRenderTargetSize() || h > this->caps()->maxRenderTargetSize()) {
1508 return GrBackendRenderTarget();
1509 }
1510
Brian Salomon8a375832018-03-14 10:21:40 -04001511 this->handleDirtyContext();
Brian Salomon52e943a2018-03-13 09:32:39 -04001512 GrVkImageInfo info;
Brian Osman2d010b62018-08-09 10:55:09 -04001513 auto config = GrColorTypeToPixelConfig(ct, GrSRGBEncoded::kNo);
Brian Salomon52e943a2018-03-13 09:32:39 -04001514 if (kUnknown_GrPixelConfig == config) {
1515 return {};
1516 }
Robert Phillips646f6372018-09-25 09:31:10 -04001517 if (!this->createTestingOnlyVkImage(config, w, h, false, true, GrMipMapped::kNo, nullptr, 0,
Brian Salomon52e943a2018-03-13 09:32:39 -04001518 &info)) {
1519 return {};
1520 }
Greg Daniel108bb232018-07-03 16:18:29 -04001521 GrBackendRenderTarget beRT = GrBackendRenderTarget(w, h, 1, 0, info);
1522 // Lots of tests don't go through Skia's public interface which will set the config so for
1523 // testing we make sure we set a config here.
1524 beRT.setPixelConfig(config);
1525 return beRT;
Brian Salomonf865b052018-03-09 09:01:53 -05001526}
1527
Brian Salomon52e943a2018-03-13 09:32:39 -04001528void GrVkGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget& rt) {
1529 SkASSERT(kVulkan_GrBackend == rt.fBackend);
Brian Salomonf865b052018-03-09 09:01:53 -05001530
Greg Daniel323fbcf2018-04-10 13:46:30 -04001531 GrVkImageInfo info;
1532 if (rt.getVkImageInfo(&info)) {
Brian Salomon52e943a2018-03-13 09:32:39 -04001533 // something in the command buffer may still be using this, so force submit
1534 this->submitCommandBuffer(kForce_SyncQueue);
Greg Daniel323fbcf2018-04-10 13:46:30 -04001535 GrVkImage::DestroyImageInfo(this, const_cast<GrVkImageInfo*>(&info));
Brian Salomon52e943a2018-03-13 09:32:39 -04001536 }
1537}
Brian Salomonf865b052018-03-09 09:01:53 -05001538
Greg Daniel26b50a42018-03-08 09:49:58 -05001539void GrVkGpu::testingOnly_flushGpuAndSync() {
1540 this->submitCommandBuffer(kForce_SyncQueue);
1541}
Brian Salomonf865b052018-03-09 09:01:53 -05001542#endif
Greg Daniel26b50a42018-03-08 09:49:58 -05001543
Greg Daniel164a9f02016-02-22 09:56:40 -05001544////////////////////////////////////////////////////////////////////////////////
1545
1546void GrVkGpu::addMemoryBarrier(VkPipelineStageFlags srcStageMask,
1547 VkPipelineStageFlags dstStageMask,
1548 bool byRegion,
1549 VkMemoryBarrier* barrier) const {
1550 SkASSERT(fCurrentCmdBuffer);
1551 fCurrentCmdBuffer->pipelineBarrier(this,
1552 srcStageMask,
1553 dstStageMask,
1554 byRegion,
1555 GrVkCommandBuffer::kMemory_BarrierType,
1556 barrier);
1557}
1558
1559void GrVkGpu::addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
1560 VkPipelineStageFlags dstStageMask,
1561 bool byRegion,
1562 VkBufferMemoryBarrier* barrier) const {
1563 SkASSERT(fCurrentCmdBuffer);
1564 fCurrentCmdBuffer->pipelineBarrier(this,
1565 srcStageMask,
1566 dstStageMask,
1567 byRegion,
1568 GrVkCommandBuffer::kBufferMemory_BarrierType,
1569 barrier);
1570}
1571
1572void GrVkGpu::addImageMemoryBarrier(VkPipelineStageFlags srcStageMask,
1573 VkPipelineStageFlags dstStageMask,
1574 bool byRegion,
1575 VkImageMemoryBarrier* barrier) const {
1576 SkASSERT(fCurrentCmdBuffer);
1577 fCurrentCmdBuffer->pipelineBarrier(this,
1578 srcStageMask,
1579 dstStageMask,
1580 byRegion,
1581 GrVkCommandBuffer::kImageMemory_BarrierType,
1582 barrier);
1583}
1584
Greg Daniel51316782017-08-02 15:10:09 +00001585void GrVkGpu::onFinishFlush(bool insertedSemaphore) {
1586 // Submit the current command buffer to the Queue. Whether we inserted semaphores or not does
1587 // not effect what we do here.
Greg Daniel164a9f02016-02-22 09:56:40 -05001588 this->submitCommandBuffer(kSkip_SyncQueue);
1589}
1590
Greg Daniel25af6712018-04-25 10:44:38 -04001591static int get_surface_sample_cnt(GrSurface* surf) {
1592 if (const GrRenderTarget* rt = surf->asRenderTarget()) {
1593 return rt->numColorSamples();
egdaniel17b89252016-04-05 07:23:38 -07001594 }
Greg Daniel25af6712018-04-25 10:44:38 -04001595 return 0;
Greg Daniel164a9f02016-02-22 09:56:40 -05001596}
1597
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001598void GrVkGpu::copySurfaceAsCopyImage(GrSurface* dst, GrSurfaceOrigin dstOrigin,
1599 GrSurface* src, GrSurfaceOrigin srcOrigin,
egdaniel17b89252016-04-05 07:23:38 -07001600 GrVkImage* dstImage,
1601 GrVkImage* srcImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001602 const SkIRect& srcRect,
1603 const SkIPoint& dstPoint) {
Greg Daniel25af6712018-04-25 10:44:38 -04001604#ifdef SK_DEBUG
1605 int dstSampleCnt = get_surface_sample_cnt(dst);
1606 int srcSampleCnt = get_surface_sample_cnt(src);
1607 SkASSERT(this->vkCaps().canCopyImage(dst->config(), dstSampleCnt, dstOrigin,
1608 src->config(), srcSampleCnt, srcOrigin));
1609
1610#endif
Greg Daniel164a9f02016-02-22 09:56:40 -05001611
Greg Daniel164a9f02016-02-22 09:56:40 -05001612 // These flags are for flushing/invalidating caches and for the dst image it doesn't matter if
1613 // the cache is flushed since it is only being written to.
egdaniel17b89252016-04-05 07:23:38 -07001614 dstImage->setImageLayout(this,
jvanverth50c46c72016-05-06 12:31:28 -07001615 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1616 VK_ACCESS_TRANSFER_WRITE_BIT,
1617 VK_PIPELINE_STAGE_TRANSFER_BIT,
1618 false);
Greg Daniel164a9f02016-02-22 09:56:40 -05001619
egdaniel17b89252016-04-05 07:23:38 -07001620 srcImage->setImageLayout(this,
1621 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
jvanverth50c46c72016-05-06 12:31:28 -07001622 VK_ACCESS_TRANSFER_READ_BIT,
1623 VK_PIPELINE_STAGE_TRANSFER_BIT,
egdaniel17b89252016-04-05 07:23:38 -07001624 false);
Greg Daniel164a9f02016-02-22 09:56:40 -05001625
1626 // Flip rect if necessary
1627 SkIRect srcVkRect = srcRect;
1628 int32_t dstY = dstPoint.fY;
1629
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001630 if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
1631 SkASSERT(kBottomLeft_GrSurfaceOrigin == dstOrigin);
Greg Daniel164a9f02016-02-22 09:56:40 -05001632 srcVkRect.fTop = src->height() - srcRect.fBottom;
1633 srcVkRect.fBottom = src->height() - srcRect.fTop;
1634 dstY = dst->height() - dstPoint.fY - srcVkRect.height();
1635 }
1636
1637 VkImageCopy copyRegion;
1638 memset(&copyRegion, 0, sizeof(VkImageCopy));
1639 copyRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
1640 copyRegion.srcOffset = { srcVkRect.fLeft, srcVkRect.fTop, 0 };
1641 copyRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
1642 copyRegion.dstOffset = { dstPoint.fX, dstY, 0 };
egdanielc355bc82016-04-27 11:31:59 -07001643 copyRegion.extent = { (uint32_t)srcVkRect.width(), (uint32_t)srcVkRect.height(), 1 };
Greg Daniel164a9f02016-02-22 09:56:40 -05001644
1645 fCurrentCmdBuffer->copyImage(this,
egdaniel17b89252016-04-05 07:23:38 -07001646 srcImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001647 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
egdaniel17b89252016-04-05 07:23:38 -07001648 dstImage,
Greg Daniel164a9f02016-02-22 09:56:40 -05001649 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1650 1,
1651 &copyRegion);
jvanverth900bd4a2016-04-29 13:53:12 -07001652
1653 SkIRect dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY,
1654 srcRect.width(), srcRect.height());
Brian Salomon1fabd512018-02-09 09:54:25 -05001655 this->didWriteToSurface(dst, dstOrigin, &dstRect);
Greg Daniel164a9f02016-02-22 09:56:40 -05001656}
1657
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001658void GrVkGpu::copySurfaceAsBlit(GrSurface* dst, GrSurfaceOrigin dstOrigin,
1659 GrSurface* src, GrSurfaceOrigin srcOrigin,
egdaniel17b89252016-04-05 07:23:38 -07001660 GrVkImage* dstImage,
1661 GrVkImage* srcImage,
1662 const SkIRect& srcRect,
1663 const SkIPoint& dstPoint) {
Greg Daniel25af6712018-04-25 10:44:38 -04001664#ifdef SK_DEBUG
1665 int dstSampleCnt = get_surface_sample_cnt(dst);
1666 int srcSampleCnt = get_surface_sample_cnt(src);
1667 SkASSERT(this->vkCaps().canCopyAsBlit(dst->config(), dstSampleCnt, dstImage->isLinearTiled(),
1668 src->config(), srcSampleCnt, srcImage->isLinearTiled()));
egdaniel17b89252016-04-05 07:23:38 -07001669
Greg Daniel25af6712018-04-25 10:44:38 -04001670#endif
egdaniel17b89252016-04-05 07:23:38 -07001671 dstImage->setImageLayout(this,
1672 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
jvanverth50c46c72016-05-06 12:31:28 -07001673 VK_ACCESS_TRANSFER_WRITE_BIT,
1674 VK_PIPELINE_STAGE_TRANSFER_BIT,
egdaniel17b89252016-04-05 07:23:38 -07001675 false);
1676
egdaniel17b89252016-04-05 07:23:38 -07001677 srcImage->setImageLayout(this,
1678 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
jvanverth50c46c72016-05-06 12:31:28 -07001679 VK_ACCESS_TRANSFER_READ_BIT,
1680 VK_PIPELINE_STAGE_TRANSFER_BIT,
egdaniel17b89252016-04-05 07:23:38 -07001681 false);
1682
1683 // Flip rect if necessary
1684 SkIRect srcVkRect;
egdaniel8af936d2016-04-07 10:17:47 -07001685 srcVkRect.fLeft = srcRect.fLeft;
1686 srcVkRect.fRight = srcRect.fRight;
egdaniel17b89252016-04-05 07:23:38 -07001687 SkIRect dstRect;
1688 dstRect.fLeft = dstPoint.fX;
egdaniel8af936d2016-04-07 10:17:47 -07001689 dstRect.fRight = dstPoint.fX + srcRect.width();
egdaniel17b89252016-04-05 07:23:38 -07001690
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001691 if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
egdaniel17b89252016-04-05 07:23:38 -07001692 srcVkRect.fTop = src->height() - srcRect.fBottom;
1693 srcVkRect.fBottom = src->height() - srcRect.fTop;
1694 } else {
egdaniel8af936d2016-04-07 10:17:47 -07001695 srcVkRect.fTop = srcRect.fTop;
1696 srcVkRect.fBottom = srcRect.fBottom;
egdaniel17b89252016-04-05 07:23:38 -07001697 }
1698
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001699 if (kBottomLeft_GrSurfaceOrigin == dstOrigin) {
egdaniel17b89252016-04-05 07:23:38 -07001700 dstRect.fTop = dst->height() - dstPoint.fY - srcVkRect.height();
1701 } else {
1702 dstRect.fTop = dstPoint.fY;
1703 }
1704 dstRect.fBottom = dstRect.fTop + srcVkRect.height();
1705
1706 // If we have different origins, we need to flip the top and bottom of the dst rect so that we
1707 // get the correct origintation of the copied data.
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001708 if (srcOrigin != dstOrigin) {
Ben Wagnerf08d1d02018-06-18 15:11:00 -04001709 using std::swap;
1710 swap(dstRect.fTop, dstRect.fBottom);
egdaniel17b89252016-04-05 07:23:38 -07001711 }
1712
1713 VkImageBlit blitRegion;
1714 memset(&blitRegion, 0, sizeof(VkImageBlit));
1715 blitRegion.srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
1716 blitRegion.srcOffsets[0] = { srcVkRect.fLeft, srcVkRect.fTop, 0 };
Greg Daniele76071c2016-11-02 11:57:06 -04001717 blitRegion.srcOffsets[1] = { srcVkRect.fRight, srcVkRect.fBottom, 1 };
egdaniel17b89252016-04-05 07:23:38 -07001718 blitRegion.dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
1719 blitRegion.dstOffsets[0] = { dstRect.fLeft, dstRect.fTop, 0 };
Greg Daniele76071c2016-11-02 11:57:06 -04001720 blitRegion.dstOffsets[1] = { dstRect.fRight, dstRect.fBottom, 1 };
egdaniel17b89252016-04-05 07:23:38 -07001721
1722 fCurrentCmdBuffer->blitImage(this,
egdanielb2df0c22016-05-13 11:30:37 -07001723 *srcImage,
1724 *dstImage,
egdaniel17b89252016-04-05 07:23:38 -07001725 1,
1726 &blitRegion,
1727 VK_FILTER_NEAREST); // We never scale so any filter works here
jvanverth900bd4a2016-04-29 13:53:12 -07001728
Greg Daniel1ba1bfc2018-06-21 13:55:19 -04001729 dstRect = SkIRect::MakeXYWH(dstPoint.fX, dstPoint.fY, srcRect.width(), srcRect.height());
Brian Salomon1fabd512018-02-09 09:54:25 -05001730 this->didWriteToSurface(dst, dstOrigin, &dstRect);
egdaniel17b89252016-04-05 07:23:38 -07001731}
1732
Brian Salomon1fabd512018-02-09 09:54:25 -05001733void GrVkGpu::copySurfaceAsResolve(GrSurface* dst, GrSurfaceOrigin dstOrigin, GrSurface* src,
1734 GrSurfaceOrigin srcOrigin, const SkIRect& origSrcRect,
1735 const SkIPoint& origDstPoint) {
egdaniel4bcd62e2016-08-31 07:37:31 -07001736 GrVkRenderTarget* srcRT = static_cast<GrVkRenderTarget*>(src->asRenderTarget());
Brian Salomon1fabd512018-02-09 09:54:25 -05001737 SkIRect srcRect = origSrcRect;
1738 SkIPoint dstPoint = origDstPoint;
1739 if (kBottomLeft_GrSurfaceOrigin == srcOrigin) {
1740 SkASSERT(kBottomLeft_GrSurfaceOrigin == dstOrigin);
1741 srcRect = {origSrcRect.fLeft, src->height() - origSrcRect.fBottom,
1742 origSrcRect.fRight, src->height() - origSrcRect.fTop};
1743 dstPoint.fY = dst->height() - dstPoint.fY - srcRect.height();
1744 }
1745 this->resolveImage(dst, srcRT, srcRect, dstPoint);
Greg Daniel1ba1bfc2018-06-21 13:55:19 -04001746 SkIRect dstRect = SkIRect::MakeXYWH(origDstPoint.fX, origDstPoint.fY,
1747 srcRect.width(), srcRect.height());
1748 this->didWriteToSurface(dst, dstOrigin, &dstRect);
egdaniel4bcd62e2016-08-31 07:37:31 -07001749}
1750
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001751bool GrVkGpu::onCopySurface(GrSurface* dst, GrSurfaceOrigin dstOrigin,
1752 GrSurface* src, GrSurfaceOrigin srcOrigin,
Greg Daniel55fa6472018-03-16 16:13:10 -04001753 const SkIRect& srcRect, const SkIPoint& dstPoint,
1754 bool canDiscardOutsideDstRect) {
Greg Daniel25af6712018-04-25 10:44:38 -04001755 GrPixelConfig dstConfig = dst->config();
1756 GrPixelConfig srcConfig = src->config();
1757
1758 int dstSampleCnt = get_surface_sample_cnt(dst);
1759 int srcSampleCnt = get_surface_sample_cnt(src);
1760
1761 if (this->vkCaps().canCopyAsResolve(dstConfig, dstSampleCnt, dstOrigin,
1762 srcConfig, srcSampleCnt, srcOrigin)) {
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001763 this->copySurfaceAsResolve(dst, dstOrigin, src, srcOrigin, srcRect, dstPoint);
egdanielec440992016-09-13 09:54:11 -07001764 return true;
egdaniel4bcd62e2016-08-31 07:37:31 -07001765 }
1766
egdanielfd016d72016-09-27 12:13:05 -07001767 if (this->vkCaps().mustSubmitCommandsBeforeCopyOp()) {
1768 this->submitCommandBuffer(GrVkGpu::kSkip_SyncQueue);
1769 }
1770
Greg Daniel25af6712018-04-25 10:44:38 -04001771 if (this->vkCaps().canCopyAsDraw(dstConfig, SkToBool(dst->asRenderTarget()),
1772 srcConfig, SkToBool(src->asTexture()))) {
1773 SkAssertResult(fCopyManager.copySurfaceAsDraw(this, dst, dstOrigin, src, srcOrigin, srcRect,
1774 dstPoint, canDiscardOutsideDstRect));
Brian Salomon3d86a192018-02-27 16:46:11 -05001775 auto dstRect = srcRect.makeOffset(dstPoint.fX, dstPoint.fY);
1776 this->didWriteToSurface(dst, dstOrigin, &dstRect);
egdanielbc9b2962016-09-27 08:00:53 -07001777 return true;
1778 }
1779
egdaniel17b89252016-04-05 07:23:38 -07001780 GrVkImage* dstImage;
1781 GrVkImage* srcImage;
egdaniel4bcd62e2016-08-31 07:37:31 -07001782 GrRenderTarget* dstRT = dst->asRenderTarget();
1783 if (dstRT) {
1784 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(dstRT);
1785 dstImage = vkRT->numColorSamples() > 1 ? vkRT->msaaImage() : vkRT;
1786 } else {
1787 SkASSERT(dst->asTexture());
egdaniel17b89252016-04-05 07:23:38 -07001788 dstImage = static_cast<GrVkTexture*>(dst->asTexture());
egdaniel17b89252016-04-05 07:23:38 -07001789 }
egdaniel4bcd62e2016-08-31 07:37:31 -07001790 GrRenderTarget* srcRT = src->asRenderTarget();
1791 if (srcRT) {
1792 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(srcRT);
1793 srcImage = vkRT->numColorSamples() > 1 ? vkRT->msaaImage() : vkRT;
egdaniel17b89252016-04-05 07:23:38 -07001794 } else {
egdaniel4bcd62e2016-08-31 07:37:31 -07001795 SkASSERT(src->asTexture());
1796 srcImage = static_cast<GrVkTexture*>(src->asTexture());
egdaniel17b89252016-04-05 07:23:38 -07001797 }
1798
Greg Daniel25af6712018-04-25 10:44:38 -04001799 if (this->vkCaps().canCopyImage(dstConfig, dstSampleCnt, dstOrigin,
1800 srcConfig, srcSampleCnt, srcOrigin)) {
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001801 this->copySurfaceAsCopyImage(dst, dstOrigin, src, srcOrigin, dstImage, srcImage,
1802 srcRect, dstPoint);
egdaniel17b89252016-04-05 07:23:38 -07001803 return true;
1804 }
1805
Greg Daniel25af6712018-04-25 10:44:38 -04001806 if (this->vkCaps().canCopyAsBlit(dstConfig, dstSampleCnt, dstImage->isLinearTiled(),
1807 srcConfig, srcSampleCnt, srcImage->isLinearTiled())) {
Robert Phillipsb0e93a22017-08-29 08:26:54 -04001808 this->copySurfaceAsBlit(dst, dstOrigin, src, srcOrigin, dstImage, srcImage,
1809 srcRect, dstPoint);
Greg Daniel164a9f02016-02-22 09:56:40 -05001810 return true;
1811 }
1812
Greg Daniel164a9f02016-02-22 09:56:40 -05001813 return false;
1814}
1815
Brian Salomona6948702018-06-01 15:33:20 -04001816bool GrVkGpu::onReadPixels(GrSurface* surface, int left, int top, int width, int height,
1817 GrColorType dstColorType, void* buffer, size_t rowBytes) {
Brian Salomonc320b152018-02-20 14:05:36 -05001818 if (GrPixelConfigToColorType(surface->config()) != dstColorType) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001819 return false;
1820 }
1821
egdaniel66933552016-08-24 07:22:19 -07001822 GrVkImage* image = nullptr;
1823 GrVkRenderTarget* rt = static_cast<GrVkRenderTarget*>(surface->asRenderTarget());
1824 if (rt) {
1825 // resolve the render target if necessary
1826 switch (rt->getResolveType()) {
1827 case GrVkRenderTarget::kCantResolve_ResolveType:
1828 return false;
1829 case GrVkRenderTarget::kAutoResolves_ResolveType:
1830 break;
1831 case GrVkRenderTarget::kCanResolve_ResolveType:
Brian Salomon1fabd512018-02-09 09:54:25 -05001832 this->internalResolveRenderTarget(rt, false);
egdaniel66933552016-08-24 07:22:19 -07001833 break;
1834 default:
Ben Wagnerb4aab9a2017-08-16 10:53:04 -04001835 SK_ABORT("Unknown resolve type");
egdaniel66933552016-08-24 07:22:19 -07001836 }
1837 image = rt;
1838 } else {
1839 image = static_cast<GrVkTexture*>(surface->asTexture());
1840 }
1841
1842 if (!image) {
Greg Daniel164a9f02016-02-22 09:56:40 -05001843 return false;
1844 }
1845
Greg Daniel475eb702018-09-28 14:16:50 -04001846 // Skia's RGB_888x color type, which we map to the vulkan R8G8B8_UNORM, expects the data to be
1847 // 32 bits, but the Vulkan format is only 24. So we first copy the surface into an R8G8B8A8
1848 // image and then do the read pixels from that.
1849 sk_sp<GrVkTextureRenderTarget> copySurface;
1850 if (dstColorType == GrColorType::kRGB_888x) {
1851 SkASSERT(image->imageFormat() == VK_FORMAT_R8G8B8_UNORM &&
1852 surface->config() == kRGB_888_GrPixelConfig);
1853
1854 // Make a new surface that is RGBA to copy the RGB surface into.
1855 GrSurfaceDesc surfDesc;
1856 surfDesc.fFlags = kRenderTarget_GrSurfaceFlag;
1857 surfDesc.fWidth = width;
1858 surfDesc.fHeight = height;
1859 surfDesc.fConfig = kRGBA_8888_GrPixelConfig;
1860 surfDesc.fSampleCnt = 1;
1861
1862 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
1863 VK_IMAGE_USAGE_SAMPLED_BIT |
1864 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
1865 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1866
1867 GrVkImage::ImageDesc imageDesc;
1868 imageDesc.fImageType = VK_IMAGE_TYPE_2D;
1869 imageDesc.fFormat = VK_FORMAT_R8G8B8A8_UNORM;
1870 imageDesc.fWidth = width;
1871 imageDesc.fHeight = height;
1872 imageDesc.fLevels = 1;
1873 imageDesc.fSamples = 1;
1874 imageDesc.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
1875 imageDesc.fUsageFlags = usageFlags;
1876 imageDesc.fMemProps = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1877
1878 copySurface = GrVkTextureRenderTarget::MakeNewTextureRenderTarget(
1879 this, SkBudgeted::kYes, surfDesc, imageDesc, GrMipMapsStatus::kNotAllocated);
1880 if (!copySurface) {
1881 return false;
1882 }
1883
1884 int srcSampleCount = 0;
1885 if (rt) {
1886 srcSampleCount = rt->numColorSamples();
1887 }
1888 static const GrSurfaceOrigin kOrigin = kTopLeft_GrSurfaceOrigin;
1889 if (!this->vkCaps().canCopyAsBlit(copySurface->config(), 1, kOrigin,
1890 surface->config(), srcSampleCount, kOrigin) &&
1891 !this->vkCaps().canCopyAsDraw(copySurface->config(), false,
1892 surface->config(), SkToBool(surface->asTexture()))) {
1893 return false;
1894 }
1895 SkIRect srcRect = SkIRect::MakeXYWH(left, top, width, height);
1896 if (!this->copySurface(copySurface.get(), kOrigin, surface, kOrigin,
1897 srcRect, SkIPoint::Make(0,0))) {
1898 return false;
1899 }
1900 top = 0;
1901 left = 0;
1902 dstColorType = GrColorType::kRGBA_8888;
1903 image = copySurface.get();
1904 }
1905
Greg Daniel164a9f02016-02-22 09:56:40 -05001906 // Change layout of our target so it can be used as copy
egdaniel66933552016-08-24 07:22:19 -07001907 image->setImageLayout(this,
1908 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1909 VK_ACCESS_TRANSFER_READ_BIT,
1910 VK_PIPELINE_STAGE_TRANSFER_BIT,
1911 false);
Greg Daniel164a9f02016-02-22 09:56:40 -05001912
Brian Salomonc320b152018-02-20 14:05:36 -05001913 int bpp = GrColorTypeBytesPerPixel(dstColorType);
egdaniel6fa0a912016-09-12 11:51:29 -07001914 size_t tightRowBytes = bpp * width;
Greg Daniel164a9f02016-02-22 09:56:40 -05001915
Greg Daniel164a9f02016-02-22 09:56:40 -05001916 VkBufferImageCopy region;
1917 memset(&region, 0, sizeof(VkBufferImageCopy));
egdaniel6fa0a912016-09-12 11:51:29 -07001918
1919 bool copyFromOrigin = this->vkCaps().mustDoCopiesFromOrigin();
1920 if (copyFromOrigin) {
1921 region.imageOffset = { 0, 0, 0 };
Brian Salomona6948702018-06-01 15:33:20 -04001922 region.imageExtent = { (uint32_t)(left + width), (uint32_t)(top + height), 1 };
egdaniel6fa0a912016-09-12 11:51:29 -07001923 } else {
Brian Salomona6948702018-06-01 15:33:20 -04001924 VkOffset3D offset = { left, top, 0 };
egdaniel6fa0a912016-09-12 11:51:29 -07001925 region.imageOffset = offset;
1926 region.imageExtent = { (uint32_t)width, (uint32_t)height, 1 };
1927 }
1928
1929 size_t transBufferRowBytes = bpp * region.imageExtent.width;
Greg Daniel386a9b62018-07-03 10:52:30 -04001930 size_t imageRows = region.imageExtent.height;
egdaniel6fa0a912016-09-12 11:51:29 -07001931 GrVkTransferBuffer* transferBuffer =
Greg Daniel3cdfa092018-02-26 16:14:10 -05001932 static_cast<GrVkTransferBuffer*>(this->createBuffer(transBufferRowBytes * imageRows,
egdaniel6fa0a912016-09-12 11:51:29 -07001933 kXferGpuToCpu_GrBufferType,
1934 kStream_GrAccessPattern));
1935
1936 // Copy the image to a buffer so we can map it to cpu memory
jvanverthdb379092016-07-07 11:18:46 -07001937 region.bufferOffset = transferBuffer->offset();
egdaniel88e8aef2016-06-27 14:34:55 -07001938 region.bufferRowLength = 0; // Forces RowLength to be width. We handle the rowBytes below.
Greg Daniel164a9f02016-02-22 09:56:40 -05001939 region.bufferImageHeight = 0; // Forces height to be tightly packed. Only useful for 3d images.
1940 region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
Greg Daniel164a9f02016-02-22 09:56:40 -05001941
1942 fCurrentCmdBuffer->copyImageToBuffer(this,
egdaniel66933552016-08-24 07:22:19 -07001943 image,
Greg Daniel164a9f02016-02-22 09:56:40 -05001944 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1945 transferBuffer,
1946 1,
1947 &region);
1948
1949 // make sure the copy to buffer has finished
1950 transferBuffer->addMemoryBarrier(this,
1951 VK_ACCESS_TRANSFER_WRITE_BIT,
1952 VK_ACCESS_HOST_READ_BIT,
1953 VK_PIPELINE_STAGE_TRANSFER_BIT,
1954 VK_PIPELINE_STAGE_HOST_BIT,
1955 false);
1956
1957 // We need to submit the current command buffer to the Queue and make sure it finishes before
1958 // we can copy the data out of the buffer.
1959 this->submitCommandBuffer(kForce_SyncQueue);
Greg Daniel88fdee92018-02-24 22:41:50 +00001960 void* mappedMemory = transferBuffer->map();
Greg Daniele35a99e2018-03-02 11:44:22 -05001961 const GrVkAlloc& transAlloc = transferBuffer->alloc();
Greg Daniel81df0412018-05-31 13:13:33 -04001962 GrVkMemory::InvalidateMappedAlloc(this, transAlloc, 0, transAlloc.fSize);
Greg Daniel164a9f02016-02-22 09:56:40 -05001963
egdaniel6fa0a912016-09-12 11:51:29 -07001964 if (copyFromOrigin) {
1965 uint32_t skipRows = region.imageExtent.height - height;
1966 mappedMemory = (char*)mappedMemory + transBufferRowBytes * skipRows + bpp * left;
1967 }
1968
Brian Salomona6948702018-06-01 15:33:20 -04001969 SkRectMemcpy(buffer, rowBytes, mappedMemory, transBufferRowBytes, tightRowBytes, height);
Greg Daniel164a9f02016-02-22 09:56:40 -05001970
1971 transferBuffer->unmap();
1972 transferBuffer->unref();
Greg Daniel164a9f02016-02-22 09:56:40 -05001973 return true;
1974}
egdaniel066df7c2016-06-08 14:02:27 -07001975
egdaniel27bb2842016-07-07 11:58:35 -07001976// The RenderArea bounds we pass into BeginRenderPass must have a start x value that is a multiple
1977// of the granularity. The width must also be a multiple of the granularity or eaqual to the width
1978// the the entire attachment. Similar requirements for the y and height components.
1979void adjust_bounds_to_granularity(SkIRect* dstBounds, const SkIRect& srcBounds,
1980 const VkExtent2D& granularity, int maxWidth, int maxHeight) {
1981 // Adjust Width
egdanield5797b32016-09-20 12:57:45 -07001982 if ((0 != granularity.width && 1 != granularity.width)) {
1983 // Start with the right side of rect so we know if we end up going pass the maxWidth.
1984 int rightAdj = srcBounds.fRight % granularity.width;
1985 if (rightAdj != 0) {
1986 rightAdj = granularity.width - rightAdj;
1987 }
1988 dstBounds->fRight = srcBounds.fRight + rightAdj;
1989 if (dstBounds->fRight > maxWidth) {
1990 dstBounds->fRight = maxWidth;
1991 dstBounds->fLeft = 0;
1992 } else {
1993 dstBounds->fLeft = srcBounds.fLeft - srcBounds.fLeft % granularity.width;
1994 }
egdaniel27bb2842016-07-07 11:58:35 -07001995 } else {
egdanield5797b32016-09-20 12:57:45 -07001996 dstBounds->fLeft = srcBounds.fLeft;
1997 dstBounds->fRight = srcBounds.fRight;
egdaniel27bb2842016-07-07 11:58:35 -07001998 }
1999
2000 // Adjust height
egdanield5797b32016-09-20 12:57:45 -07002001 if ((0 != granularity.height && 1 != granularity.height)) {
2002 // Start with the bottom side of rect so we know if we end up going pass the maxHeight.
2003 int bottomAdj = srcBounds.fBottom % granularity.height;
2004 if (bottomAdj != 0) {
2005 bottomAdj = granularity.height - bottomAdj;
2006 }
2007 dstBounds->fBottom = srcBounds.fBottom + bottomAdj;
2008 if (dstBounds->fBottom > maxHeight) {
2009 dstBounds->fBottom = maxHeight;
2010 dstBounds->fTop = 0;
2011 } else {
2012 dstBounds->fTop = srcBounds.fTop - srcBounds.fTop % granularity.height;
2013 }
egdaniel27bb2842016-07-07 11:58:35 -07002014 } else {
egdanield5797b32016-09-20 12:57:45 -07002015 dstBounds->fTop = srcBounds.fTop;
2016 dstBounds->fBottom = srcBounds.fBottom;
egdaniel27bb2842016-07-07 11:58:35 -07002017 }
2018}
2019
// Executes the given secondary command buffers inside a render pass on 'target'. The pass's
// render area starts as 'bounds' (given in Skia space with 'origin'), flipped into Vulkan's
// top-left space if needed and then grown to the device's render-area granularity.
void GrVkGpu::submitSecondaryCommandBuffer(const SkTArray<GrVkSecondaryCommandBuffer*>& buffers,
                                           const GrVkRenderPass* renderPass,
                                           const VkClearValue* colorClear,
                                           GrVkRenderTarget* target, GrSurfaceOrigin origin,
                                           const SkIRect& bounds) {
    const SkIRect* pBounds = &bounds;
    SkIRect flippedBounds;
    if (kBottomLeft_GrSurfaceOrigin == origin) {
        // Bottom-left origin: flip the rect vertically into Vulkan's top-left coordinates.
        flippedBounds = bounds;
        flippedBounds.fTop = target->height() - bounds.fBottom;
        flippedBounds.fBottom = target->height() - bounds.fTop;
        pBounds = &flippedBounds;
    }

    // The bounds we use for the render pass should be of the granularity supported
    // by the device.
    const VkExtent2D& granularity = renderPass->granularity();
    SkIRect adjustedBounds;
    if ((0 != granularity.width && 1 != granularity.width) ||
        (0 != granularity.height && 1 != granularity.height)) {
        adjust_bounds_to_granularity(&adjustedBounds, *pBounds, granularity,
                                     target->width(), target->height());
        pBounds = &adjustedBounds;
    }

#ifdef SK_DEBUG
    // This code expects the render pass to lay out color at attachment 0 and, if present,
    // stencil at attachment 1; the clear array below is ordered to match.
    uint32_t index;
    bool result = renderPass->colorAttachmentIndex(&index);
    SkASSERT(result && 0 == index);
    result = renderPass->stencilAttachmentIndex(&index);
    if (result) {
        SkASSERT(1 == index);
    }
#endif
    VkClearValue clears[2];
    clears[0].color = colorClear->color;
    clears[1].depthStencil.depth = 0.0f;
    clears[1].depthStencil.stencil = 0;

    fCurrentCmdBuffer->beginRenderPass(this, renderPass, clears, *target, *pBounds, true);
    for (int i = 0; i < buffers.count(); ++i) {
        fCurrentCmdBuffer->executeCommands(this, buffers[i]);
    }
    fCurrentCmdBuffer->endRenderPass(this);

    // Note: the dirty region reported is the original (un-flipped, un-adjusted) bounds.
    this->didWriteToSurface(target, origin, &bounds);
}
egdaniel9cb63402016-06-23 08:37:05 -07002067
Robert Phillips5b5d84c2018-08-09 15:12:18 -04002068void GrVkGpu::submit(GrGpuCommandBuffer* buffer) {
2069 if (buffer->asRTCommandBuffer()) {
2070 SkASSERT(fCachedRTCommandBuffer.get() == buffer);
2071
2072 fCachedRTCommandBuffer->submit();
2073 fCachedRTCommandBuffer->reset();
2074 } else {
2075 SkASSERT(fCachedTexCommandBuffer.get() == buffer);
2076
2077 fCachedTexCommandBuffer->submit();
2078 fCachedTexCommandBuffer->reset();
2079 }
2080}
2081
Greg Daniel6be35232017-03-01 17:01:09 -05002082GrFence SK_WARN_UNUSED_RESULT GrVkGpu::insertFence() {
jvanverth84741b32016-09-30 08:39:02 -07002083 VkFenceCreateInfo createInfo;
2084 memset(&createInfo, 0, sizeof(VkFenceCreateInfo));
2085 createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
2086 createInfo.pNext = nullptr;
2087 createInfo.flags = 0;
2088 VkFence fence = VK_NULL_HANDLE;
Greg Daniel6be35232017-03-01 17:01:09 -05002089
2090 VK_CALL_ERRCHECK(CreateFence(this->device(), &createInfo, nullptr, &fence));
2091 VK_CALL(QueueSubmit(this->queue(), 0, nullptr, fence));
2092
2093 GR_STATIC_ASSERT(sizeof(GrFence) >= sizeof(VkFence));
jvanverth84741b32016-09-30 08:39:02 -07002094 return (GrFence)fence;
2095}
2096
Greg Daniel6be35232017-03-01 17:01:09 -05002097bool GrVkGpu::waitFence(GrFence fence, uint64_t timeout) {
2098 SkASSERT(VK_NULL_HANDLE != (VkFence)fence);
2099
2100 VkResult result = VK_CALL(WaitForFences(this->device(), 1, (VkFence*)&fence, VK_TRUE, timeout));
jvanverth84741b32016-09-30 08:39:02 -07002101 return (VK_SUCCESS == result);
2102}
2103
2104void GrVkGpu::deleteFence(GrFence fence) const {
Greg Daniel6be35232017-03-01 17:01:09 -05002105 VK_CALL(DestroyFence(this->device(), (VkFence)fence, nullptr));
2106}
2107
Greg Daniela5cb7812017-06-16 09:45:32 -04002108sk_sp<GrSemaphore> SK_WARN_UNUSED_RESULT GrVkGpu::makeSemaphore(bool isOwned) {
2109 return GrVkSemaphore::Make(this, isOwned);
Greg Daniel6be35232017-03-01 17:01:09 -05002110}
2111
Greg Daniel48661b82018-01-22 16:11:35 -05002112sk_sp<GrSemaphore> GrVkGpu::wrapBackendSemaphore(const GrBackendSemaphore& semaphore,
2113 GrResourceProvider::SemaphoreWrapType wrapType,
2114 GrWrapOwnership ownership) {
2115 return GrVkSemaphore::MakeWrapped(this, semaphore.vkSemaphore(), wrapType, ownership);
Greg Daniela5cb7812017-06-16 09:45:32 -04002116}
2117
Greg Daniel48661b82018-01-22 16:11:35 -05002118void GrVkGpu::insertSemaphore(sk_sp<GrSemaphore> semaphore, bool flush) {
Greg Daniel6be35232017-03-01 17:01:09 -05002119 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2120
Greg Daniel48661b82018-01-22 16:11:35 -05002121 GrVkSemaphore::Resource* resource = vkSem->getResource();
2122 if (resource->shouldSignal()) {
Greg Daniel17b7c052018-01-09 13:55:33 -05002123 resource->ref();
2124 fSemaphoresToSignal.push_back(resource);
2125 }
Greg Daniela5cb7812017-06-16 09:45:32 -04002126
2127 if (flush) {
2128 this->submitCommandBuffer(kSkip_SyncQueue);
2129 }
Greg Daniel6be35232017-03-01 17:01:09 -05002130}
2131
Greg Daniel48661b82018-01-22 16:11:35 -05002132void GrVkGpu::waitSemaphore(sk_sp<GrSemaphore> semaphore) {
Greg Daniel6be35232017-03-01 17:01:09 -05002133 GrVkSemaphore* vkSem = static_cast<GrVkSemaphore*>(semaphore.get());
2134
Greg Daniel48661b82018-01-22 16:11:35 -05002135 GrVkSemaphore::Resource* resource = vkSem->getResource();
2136 if (resource->shouldWait()) {
2137 resource->ref();
2138 fSemaphoresToWaitOn.push_back(resource);
2139 }
jvanverth84741b32016-09-30 08:39:02 -07002140}
Brian Osman13dddce2017-05-09 13:19:50 -04002141
2142sk_sp<GrSemaphore> GrVkGpu::prepareTextureForCrossContextUsage(GrTexture* texture) {
2143 SkASSERT(texture);
2144 GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
2145 vkTexture->setImageLayout(this,
2146 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2147 VK_ACCESS_SHADER_READ_BIT,
2148 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
2149 false);
2150 this->submitCommandBuffer(kSkip_SyncQueue);
2151
2152 // The image layout change serves as a barrier, so no semaphore is needed
2153 return nullptr;
2154}
Greg Danielf5d87582017-12-18 14:48:15 -05002155