blob: 2722e51f656ea63e9ac535f3bf7f521e96ab082b [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
16#include "src/gpu/vk/GrVkIndexBuffer.h"
17#include "src/gpu/vk/GrVkPipeline.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/vk/GrVkPipelineState.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkRenderPass.h"
21#include "src/gpu/vk/GrVkRenderTarget.h"
22#include "src/gpu/vk/GrVkTransferBuffer.h"
23#include "src/gpu/vk/GrVkUtil.h"
24#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050025
26void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060027 for (auto& boundInputBuffer : fBoundInputBuffers) {
28 boundInputBuffer = VK_NULL_HANDLE;
29 }
egdaniel470d77a2016-03-18 12:50:27 -070030 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070031
32 memset(&fCachedViewport, 0, sizeof(VkViewport));
33 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
34
35 memset(&fCachedScissor, 0, sizeof(VkRect2D));
36 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
37
38 for (int i = 0; i < 4; ++i) {
39 fCachedBlendConstant[i] = -1.0;
40 }
Greg Daniel164a9f02016-02-22 09:56:40 -050041}
42
Greg Daniel0addbdf2019-11-25 15:03:58 -050043void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu, VkCommandPool cmdPool) const {
Brian Salomone39526b2019-06-24 16:35:53 -040044 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Greg Daniel164a9f02016-02-22 09:56:40 -050045 SkASSERT(!fIsActive);
Greg Daniel0addbdf2019-11-25 15:03:58 -050046 SkASSERT(!fTrackedResources.count());
47 SkASSERT(!fTrackedRecycledResources.count());
48 SkASSERT(cmdPool != VK_NULL_HANDLE);
49 SkASSERT(!this->isWrapped());
halcanary9d524f22016-03-29 09:03:52 -070050
Greg Daniel0addbdf2019-11-25 15:03:58 -050051 GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), cmdPool, 1, &fCmdBuffer));
egdaniel9cb63402016-06-23 08:37:05 -070052
53 this->onFreeGPUData(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050054}
55
Greg Danielcef213c2017-04-21 11:52:27 -040056void GrVkCommandBuffer::abandonGPUData() const {
Ethan Nicholas8e265a72018-12-12 16:22:40 -050057 SkDEBUGCODE(fResourcesReleased = true;)
Greg Daniel164a9f02016-02-22 09:56:40 -050058 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050059 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel164a9f02016-02-22 09:56:40 -050060 fTrackedResources[i]->unrefAndAbandon();
61 }
egdanielc1be9bc2016-07-20 08:33:00 -070062
63 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050064 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070065 // We don't recycle resources when abandoning them.
66 fTrackedRecycledResources[i]->unrefAndAbandon();
67 }
Greg Daniel7d918fd2018-06-19 15:22:01 -040068
Ethan Nicholas8e265a72018-12-12 16:22:40 -050069 this->onAbandonGPUData();
Greg Daniel164a9f02016-02-22 09:56:40 -050070}
71
Ethan Nicholas8e265a72018-12-12 16:22:40 -050072void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
Brian Salomone39526b2019-06-24 16:35:53 -040073 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Ethan Nicholas8e265a72018-12-12 16:22:40 -050074 SkDEBUGCODE(fResourcesReleased = true;)
jvanverth7ec92412016-07-06 09:24:57 -070075 SkASSERT(!fIsActive);
76 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050077 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
jvanverth7ec92412016-07-06 09:24:57 -070078 fTrackedResources[i]->unref(gpu);
79 }
egdanielc1be9bc2016-07-20 08:33:00 -070080 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050081 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070082 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
83 }
egdaniel594739c2016-09-20 12:39:25 -070084
85 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
86 fTrackedResources.reset();
87 fTrackedRecycledResources.reset();
88 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
89 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
90 fNumResets = 0;
91 } else {
92 fTrackedResources.rewind();
93 fTrackedRecycledResources.rewind();
94 }
95
jvanverth7ec92412016-07-06 09:24:57 -070096 this->invalidateState();
97
Ethan Nicholas8e265a72018-12-12 16:22:40 -050098 this->onReleaseResources(gpu);
jvanverth7ec92412016-07-06 09:24:57 -070099}
100
Greg Daniel164a9f02016-02-22 09:56:40 -0500101////////////////////////////////////////////////////////////////////////////////
102// CommandBuffer commands
103////////////////////////////////////////////////////////////////////////////////
104
105void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Greg Daniel59dc1482019-02-22 10:46:38 -0500106 const GrVkResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -0500107 VkPipelineStageFlags srcStageMask,
108 VkPipelineStageFlags dstStageMask,
109 bool byRegion,
110 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -0500111 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500112 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -0500113 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -0700114 // For images we can have barriers inside of render passes but they require us to add more
115 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
116 // never have buffer barriers inside of a render pass. For now we will just assert that we are
117 // not in a render pass.
118 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400119
Greg Danielee54f232019-04-03 14:58:40 -0400120 if (barrierType == kBufferMemory_BarrierType) {
121 const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
122 fBufferBarriers.push_back(*barrierPtr);
123 } else {
124 SkASSERT(barrierType == kImageMemory_BarrierType);
125 const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
Greg Daniel212ff052019-04-09 10:41:34 -0400126 // We need to check if we are adding a pipeline barrier that covers part of the same
127 // subresource range as a barrier that is already in current batch. If it does, then we must
128 // submit the first batch because the vulkan spec does not define a specific ordering for
129 // barriers submitted in the same batch.
130 // TODO: Look if we can gain anything by merging barriers together instead of submitting
131 // the old ones.
132 for (int i = 0; i < fImageBarriers.count(); ++i) {
133 VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
134 if (barrierPtr->image == currentBarrier.image) {
135 const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
136 const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
137 SkASSERT(newRange.aspectMask == oldRange.aspectMask);
138 SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
139 SkASSERT(newRange.layerCount == oldRange.layerCount);
140 uint32_t newStart = newRange.baseMipLevel;
141 uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
142 uint32_t oldStart = oldRange.baseMipLevel;
143 uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
144 if (SkTMax(newStart, oldStart) <= SkTMin(newEnd, oldEnd)) {
145 this->submitPipelineBarriers(gpu);
146 break;
147 }
148 }
149 }
Greg Danielee54f232019-04-03 14:58:40 -0400150 fImageBarriers.push_back(*barrierPtr);
Greg Daniel164a9f02016-02-22 09:56:40 -0500151 }
Greg Danielee54f232019-04-03 14:58:40 -0400152 fBarriersByRegion |= byRegion;
153
154 fSrcStageMask = fSrcStageMask | srcStageMask;
155 fDstStageMask = fDstStageMask | dstStageMask;
156
157 fHasWork = true;
Greg Daniel59dc1482019-02-22 10:46:38 -0500158 if (resource) {
159 this->addResource(resource);
160 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500161}
162
Greg Danielee54f232019-04-03 14:58:40 -0400163void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
164 SkASSERT(fIsActive);
165
166 // Currently we never submit a pipeline barrier without at least one memory barrier.
167 if (fBufferBarriers.count() || fImageBarriers.count()) {
168 // For images we can have barriers inside of render passes but they require us to add more
169 // support in subpasses which need self dependencies to have barriers inside them. Also, we
170 // can never have buffer barriers inside of a render pass. For now we will just assert that
171 // we are not in a render pass.
172 SkASSERT(!fActiveRenderPass);
173 SkASSERT(!this->isWrapped());
174 SkASSERT(fSrcStageMask && fDstStageMask);
175
176 VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
177 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
178 fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
179 fBufferBarriers.count(), fBufferBarriers.begin(),
180 fImageBarriers.count(), fImageBarriers.begin()));
181 fBufferBarriers.reset();
182 fImageBarriers.reset();
183 fBarriersByRegion = false;
184 fSrcStageMask = 0;
185 fDstStageMask = 0;
186 }
187 SkASSERT(!fBufferBarriers.count());
188 SkASSERT(!fImageBarriers.count());
189 SkASSERT(!fBarriersByRegion);
190 SkASSERT(!fSrcStageMask);
191 SkASSERT(!fDstStageMask);
192}
193
194
Greg Daniel6ecc9112017-06-16 16:17:03 +0000195void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
196 const GrVkVertexBuffer* vbuffer) {
197 VkBuffer vkBuffer = vbuffer->buffer();
198 SkASSERT(VK_NULL_HANDLE != vkBuffer);
199 SkASSERT(binding < kMaxInputBuffers);
200 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
201 // to know if we can skip binding or not.
202 if (vkBuffer != fBoundInputBuffers[binding]) {
203 VkDeviceSize offset = vbuffer->offset();
204 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
205 binding,
206 1,
207 &vkBuffer,
208 &offset));
209 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500210 this->addResource(vbuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000211 }
212}
213
214void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
215 VkBuffer vkBuffer = ibuffer->buffer();
216 SkASSERT(VK_NULL_HANDLE != vkBuffer);
217 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
218 // to know if we can skip binding or not.
219 if (vkBuffer != fBoundIndexBuffer) {
220 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
221 vkBuffer,
222 ibuffer->offset(),
223 VK_INDEX_TYPE_UINT16));
224 fBoundIndexBuffer = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500225 this->addResource(ibuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000226 }
227}
228
Greg Daniel164a9f02016-02-22 09:56:40 -0500229void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
230 int numAttachments,
231 const VkClearAttachment* attachments,
232 int numRects,
Greg Danielf346df32019-04-03 14:52:13 -0400233 const VkClearRect* clearRects) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500234 SkASSERT(fIsActive);
235 SkASSERT(fActiveRenderPass);
236 SkASSERT(numAttachments > 0);
237 SkASSERT(numRects > 0);
Greg Danielf346df32019-04-03 14:52:13 -0400238
Greg Danielee54f232019-04-03 14:58:40 -0400239 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400240
Greg Daniel164a9f02016-02-22 09:56:40 -0500241#ifdef SK_DEBUG
242 for (int i = 0; i < numAttachments; ++i) {
243 if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
244 uint32_t testIndex;
245 SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
246 SkASSERT(testIndex == attachments[i].colorAttachment);
247 }
248 }
249#endif
250 GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
251 numAttachments,
252 attachments,
253 numRects,
254 clearRects));
Greg Daniela718a612019-10-07 16:25:41 -0400255 if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
256 this->invalidateState();
257 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500258}
259
260void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
egdaniel22281c12016-03-23 13:49:40 -0700261 GrVkPipelineState* pipelineState,
Greg Danieleecc6872019-07-29 13:21:37 -0400262 VkPipelineLayout layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500263 uint32_t firstSet,
264 uint32_t setCount,
265 const VkDescriptorSet* descriptorSets,
266 uint32_t dynamicOffsetCount,
267 const uint32_t* dynamicOffsets) {
268 SkASSERT(fIsActive);
269 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
270 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Danieleecc6872019-07-29 13:21:37 -0400271 layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500272 firstSet,
273 setCount,
274 descriptorSets,
275 dynamicOffsetCount,
276 dynamicOffsets));
egdanielbc9b2962016-09-27 08:00:53 -0700277}
278
egdaniel470d77a2016-03-18 12:50:27 -0700279void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
280 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700281 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
282 VK_PIPELINE_BIND_POINT_GRAPHICS,
283 pipeline->pipeline()));
egdanielec440992016-09-13 09:54:11 -0700284 this->addResource(pipeline);
egdaniel470d77a2016-03-18 12:50:27 -0700285}
286
Greg Daniel164a9f02016-02-22 09:56:40 -0500287void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
288 uint32_t indexCount,
289 uint32_t instanceCount,
290 uint32_t firstIndex,
291 int32_t vertexOffset,
Greg Danielf346df32019-04-03 14:52:13 -0400292 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500293 SkASSERT(fIsActive);
294 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400295 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500296 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
297 indexCount,
298 instanceCount,
299 firstIndex,
300 vertexOffset,
301 firstInstance));
302}
303
304void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
305 uint32_t vertexCount,
306 uint32_t instanceCount,
307 uint32_t firstVertex,
Greg Danielf346df32019-04-03 14:52:13 -0400308 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500309 SkASSERT(fIsActive);
310 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400311 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500312 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
313 vertexCount,
314 instanceCount,
315 firstVertex,
316 firstInstance));
317}
egdaniel470d77a2016-03-18 12:50:27 -0700318
319void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
320 uint32_t firstViewport,
321 uint32_t viewportCount,
322 const VkViewport* viewports) {
323 SkASSERT(fIsActive);
324 SkASSERT(1 == viewportCount);
325 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
326 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
327 firstViewport,
328 viewportCount,
329 viewports));
330 fCachedViewport = viewports[0];
331 }
332}
333
334void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
335 uint32_t firstScissor,
336 uint32_t scissorCount,
337 const VkRect2D* scissors) {
338 SkASSERT(fIsActive);
339 SkASSERT(1 == scissorCount);
340 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
341 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
342 firstScissor,
343 scissorCount,
344 scissors));
345 fCachedScissor = scissors[0];
346 }
347}
348
349void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
350 const float blendConstants[4]) {
351 SkASSERT(fIsActive);
352 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
353 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
354 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
355 }
356}
egdaniel9a6cf802016-06-08 08:22:05 -0700357
Greg Danielee54f232019-04-03 14:58:40 -0400358void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
359 this->submitPipelineBarriers(gpu);
360 fHasWork = true;
361}
362
egdaniel9a6cf802016-06-08 08:22:05 -0700363///////////////////////////////////////////////////////////////////////////////
364// PrimaryCommandBuffer
365////////////////////////////////////////////////////////////////////////////////
egdaniel9cb63402016-06-23 08:37:05 -0700366GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
367 // Should have ended any render pass we're in the middle of
368 SkASSERT(!fActiveRenderPass);
369}
370
Greg Daniel315c8dc2019-11-26 15:41:27 -0500371GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
Greg Daniel0addbdf2019-11-25 15:03:58 -0500372 VkCommandPool cmdPool) {
egdaniel9a6cf802016-06-08 08:22:05 -0700373 const VkCommandBufferAllocateInfo cmdInfo = {
374 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400375 nullptr, // pNext
Greg Daniel0addbdf2019-11-25 15:03:58 -0500376 cmdPool, // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700377 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
378 1 // bufferCount
379 };
380
381 VkCommandBuffer cmdBuffer;
Greg Daniel315c8dc2019-11-26 15:41:27 -0500382 VkResult err;
383 GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
egdaniel9a6cf802016-06-08 08:22:05 -0700384 if (err) {
385 return nullptr;
386 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500387 return new GrVkPrimaryCommandBuffer(cmdBuffer);
egdaniel9a6cf802016-06-08 08:22:05 -0700388}
389
Greg Daniele643da62019-11-05 12:36:42 -0500390void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700391 SkASSERT(!fIsActive);
392 VkCommandBufferBeginInfo cmdBufferBeginInfo;
393 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
394 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
395 cmdBufferBeginInfo.pNext = nullptr;
396 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
397 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
398
Greg Daniele643da62019-11-05 12:36:42 -0500399 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
egdaniel9a6cf802016-06-08 08:22:05 -0700400 fIsActive = true;
401}
402
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500403void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700404 SkASSERT(fIsActive);
405 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400406
407 this->submitPipelineBarriers(gpu);
408
Greg Daniele643da62019-11-05 12:36:42 -0500409 GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
egdaniel9a6cf802016-06-08 08:22:05 -0700410 this->invalidateState();
411 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400412 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700413}
414
Greg Danielfa3adf72019-11-07 09:53:41 -0500415bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
egdaniel9cb63402016-06-23 08:37:05 -0700416 const GrVkRenderPass* renderPass,
Robert Phillips95214472017-08-08 18:00:03 -0400417 const VkClearValue clearValues[],
Greg Danielfa3adf72019-11-07 09:53:41 -0500418 GrVkRenderTarget* target,
egdaniel9cb63402016-06-23 08:37:05 -0700419 const SkIRect& bounds,
420 bool forSecondaryCB) {
egdaniel9a6cf802016-06-08 08:22:05 -0700421 SkASSERT(fIsActive);
422 SkASSERT(!fActiveRenderPass);
Greg Danielfa3adf72019-11-07 09:53:41 -0500423 SkASSERT(renderPass->isCompatible(*target));
424
425 const GrVkFramebuffer* framebuffer = target->getFramebuffer();
426 if (!framebuffer) {
427 return false;
428 }
egdaniel9cb63402016-06-23 08:37:05 -0700429
Greg Danielee54f232019-04-03 14:58:40 -0400430 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400431
egdaniel9a6cf802016-06-08 08:22:05 -0700432 VkRenderPassBeginInfo beginInfo;
egdaniel9cb63402016-06-23 08:37:05 -0700433 VkRect2D renderArea;
434 renderArea.offset = { bounds.fLeft , bounds.fTop };
435 renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };
436
437 memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
438 beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
439 beginInfo.pNext = nullptr;
440 beginInfo.renderPass = renderPass->vkRenderPass();
Greg Danielfa3adf72019-11-07 09:53:41 -0500441 beginInfo.framebuffer = framebuffer->framebuffer();
egdaniel9cb63402016-06-23 08:37:05 -0700442 beginInfo.renderArea = renderArea;
Greg Danielb68319a2018-02-23 16:08:28 -0500443 beginInfo.clearValueCount = renderPass->clearValueCount();
egdaniel9cb63402016-06-23 08:37:05 -0700444 beginInfo.pClearValues = clearValues;
445
446 VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
447 : VK_SUBPASS_CONTENTS_INLINE;
448
egdaniel9a6cf802016-06-08 08:22:05 -0700449 GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
450 fActiveRenderPass = renderPass;
451 this->addResource(renderPass);
Greg Danielfa3adf72019-11-07 09:53:41 -0500452 target->addResources(*this);
453 return true;
egdaniel9a6cf802016-06-08 08:22:05 -0700454}
455
456void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
457 SkASSERT(fIsActive);
458 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400459 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700460 GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
461 fActiveRenderPass = nullptr;
462}
463
464void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
Greg Daniel8daf3b72019-07-30 09:57:26 -0400465 std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500466 // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
467 // if the command pools both were created from were created with the same queue family. However,
468 // we currently always create them from the same pool.
egdaniel9a6cf802016-06-08 08:22:05 -0700469 SkASSERT(fIsActive);
Greg Daniel77b53f62016-10-18 11:48:51 -0400470 SkASSERT(!buffer->fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700471 SkASSERT(fActiveRenderPass);
472 SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));
473
Greg Danielee54f232019-04-03 14:58:40 -0400474 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400475
egdaniel9a6cf802016-06-08 08:22:05 -0700476 GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
Greg Daniel8daf3b72019-07-30 09:57:26 -0400477 fSecondaryCommandBuffers.push_back(std::move(buffer));
egdaniel066df7c2016-06-08 14:02:27 -0700478 // When executing a secondary command buffer all state (besides render pass state) becomes
479 // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
480 this->invalidateState();
egdaniel9a6cf802016-06-08 08:22:05 -0700481}
482
Greg Daniele643da62019-11-05 12:36:42 -0500483static void submit_to_queue(GrVkGpu* gpu,
Greg Daniel48661b82018-01-22 16:11:35 -0500484 VkQueue queue,
485 VkFence fence,
486 uint32_t waitCount,
487 const VkSemaphore* waitSemaphores,
488 const VkPipelineStageFlags* waitStages,
489 uint32_t commandBufferCount,
490 const VkCommandBuffer* commandBuffers,
491 uint32_t signalCount,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400492 const VkSemaphore* signalSemaphores,
493 GrProtected protectedContext) {
494 VkProtectedSubmitInfo protectedSubmitInfo;
495 if (protectedContext == GrProtected::kYes) {
496 memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
497 protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
498 protectedSubmitInfo.pNext = nullptr;
499 protectedSubmitInfo.protectedSubmit = VK_TRUE;
500 }
501
Greg Daniel48661b82018-01-22 16:11:35 -0500502 VkSubmitInfo submitInfo;
503 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
504 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400505 submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
Greg Daniel48661b82018-01-22 16:11:35 -0500506 submitInfo.waitSemaphoreCount = waitCount;
507 submitInfo.pWaitSemaphores = waitSemaphores;
508 submitInfo.pWaitDstStageMask = waitStages;
509 submitInfo.commandBufferCount = commandBufferCount;
510 submitInfo.pCommandBuffers = commandBuffers;
511 submitInfo.signalSemaphoreCount = signalCount;
512 submitInfo.pSignalSemaphores = signalSemaphores;
Greg Daniele643da62019-11-05 12:36:42 -0500513 GR_VK_CALL_ERRCHECK(gpu, QueueSubmit(queue, 1, &submitInfo, fence));
Greg Daniel48661b82018-01-22 16:11:35 -0500514}
515
Greg Daniel6be35232017-03-01 17:01:09 -0500516void GrVkPrimaryCommandBuffer::submitToQueue(
Greg Daniele643da62019-11-05 12:36:42 -0500517 GrVkGpu* gpu,
Greg Daniel6be35232017-03-01 17:01:09 -0500518 VkQueue queue,
519 GrVkGpu::SyncQueue sync,
Greg Daniel48661b82018-01-22 16:11:35 -0500520 SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
521 SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
egdaniel9a6cf802016-06-08 08:22:05 -0700522 SkASSERT(!fIsActive);
523
524 VkResult err;
jvanverth7ec92412016-07-06 09:24:57 -0700525 if (VK_NULL_HANDLE == fSubmitFence) {
526 VkFenceCreateInfo fenceInfo;
527 memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
528 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
529 err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
530 &fSubmitFence));
531 SkASSERT(!err);
532 } else {
533 GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
534 }
egdaniel9a6cf802016-06-08 08:22:05 -0700535
Greg Daniela5cb7812017-06-16 09:45:32 -0400536 int signalCount = signalSemaphores.count();
Greg Daniel6be35232017-03-01 17:01:09 -0500537 int waitCount = waitSemaphores.count();
Greg Daniel6be35232017-03-01 17:01:09 -0500538
Greg Daniel48661b82018-01-22 16:11:35 -0500539 if (0 == signalCount && 0 == waitCount) {
540 // This command buffer has no dependent semaphores so we can simply just submit it to the
541 // queue with no worries.
Greg Daniele643da62019-11-05 12:36:42 -0500542 submit_to_queue(gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400543 gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
Greg Daniel48661b82018-01-22 16:11:35 -0500544 } else {
Greg Daniel48661b82018-01-22 16:11:35 -0500545 SkTArray<VkSemaphore> vkSignalSems(signalCount);
546 for (int i = 0; i < signalCount; ++i) {
547 if (signalSemaphores[i]->shouldSignal()) {
548 this->addResource(signalSemaphores[i]);
549 vkSignalSems.push_back(signalSemaphores[i]->semaphore());
550 }
551 }
552
553 SkTArray<VkSemaphore> vkWaitSems(waitCount);
554 SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
555 for (int i = 0; i < waitCount; ++i) {
556 if (waitSemaphores[i]->shouldWait()) {
557 this->addResource(waitSemaphores[i]);
558 vkWaitSems.push_back(waitSemaphores[i]->semaphore());
559 vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
560 }
561 }
Greg Daniele643da62019-11-05 12:36:42 -0500562 submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(), vkWaitSems.begin(),
563 vkWaitStages.begin(), 1, &fCmdBuffer,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400564 vkSignalSems.count(), vkSignalSems.begin(),
565 gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
Greg Daniel48661b82018-01-22 16:11:35 -0500566 for (int i = 0; i < signalCount; ++i) {
567 signalSemaphores[i]->markAsSignaled();
568 }
569 for (int i = 0; i < waitCount; ++i) {
570 waitSemaphores[i]->markAsWaited();
571 }
Greg Daniel48661b82018-01-22 16:11:35 -0500572 }
egdaniel9a6cf802016-06-08 08:22:05 -0700573
574 if (GrVkGpu::kForce_SyncQueue == sync) {
575 err = GR_VK_CALL(gpu->vkInterface(),
576 WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
Emircan Uysaler283ec652019-10-31 15:33:31 -0400577 if (VK_SUCCESS != err) {
578 SkDebugf("Fence failed: %d\n", err);
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400579 SK_ABORT("failing");
egdaniel9a6cf802016-06-08 08:22:05 -0700580 }
egdaniel9a6cf802016-06-08 08:22:05 -0700581
Greg Daniela3aa75a2019-04-12 14:24:55 -0400582 fFinishedProcs.reset();
583
egdaniel9a6cf802016-06-08 08:22:05 -0700584 // Destroy the fence
585 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
586 fSubmitFence = VK_NULL_HANDLE;
587 }
588}
589
Greg Daniela3aa75a2019-04-12 14:24:55 -0400590bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500591 SkASSERT(!fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700592 if (VK_NULL_HANDLE == fSubmitFence) {
593 return true;
594 }
595
596 VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
597 switch (err) {
598 case VK_SUCCESS:
599 return true;
600
601 case VK_NOT_READY:
602 return false;
603
604 default:
605 SkDebugf("Error getting fence status: %d\n", err);
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400606 SK_ABORT("failing");
egdaniel9a6cf802016-06-08 08:22:05 -0700607 break;
608 }
609
610 return false;
611}
612
Greg Daniela3aa75a2019-04-12 14:24:55 -0400613void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
614 fFinishedProcs.push_back(std::move(finishedProc));
615}
616
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500617void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
jvanverth7ec92412016-07-06 09:24:57 -0700618 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500619 fSecondaryCommandBuffers[i]->releaseResources(gpu);
620 }
Brian Salomonab32f652019-05-10 14:24:50 -0400621 fFinishedProcs.reset();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500622}
623
Greg Daniel0addbdf2019-11-25 15:03:58 -0500624void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500625 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500626 fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
jvanverth7ec92412016-07-06 09:24:57 -0700627 }
628 fSecondaryCommandBuffers.reset();
629}
630
egdaniel9a6cf802016-06-08 08:22:05 -0700631void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
632 GrVkImage* srcImage,
633 VkImageLayout srcLayout,
634 GrVkImage* dstImage,
635 VkImageLayout dstLayout,
636 uint32_t copyRegionCount,
637 const VkImageCopy* copyRegions) {
638 SkASSERT(fIsActive);
639 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400640 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700641 this->addResource(srcImage->resource());
642 this->addResource(dstImage->resource());
643 GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
644 srcImage->image(),
645 srcLayout,
646 dstImage->image(),
647 dstLayout,
648 copyRegionCount,
649 copyRegions));
650}
651
// Records a vkCmdBlitImage (scaled/filtered copy) using raw Vulkan handles.
// The caller supplies the GrVkResource trackers separately so this overload
// can be used when only the VkImage handles are at hand; both resources are
// ref'd until the command buffer finishes on the GPU. Must be called outside
// of a render pass.
void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrVkResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}
676
Greg Daniel6ecc9112017-06-16 16:17:03 +0000677void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
678 const GrVkImage& srcImage,
679 const GrVkImage& dstImage,
680 uint32_t blitRegionCount,
681 const VkImageBlit* blitRegions,
682 VkFilter filter) {
683 this->blitImage(gpu,
684 srcImage.resource(),
685 srcImage.image(),
686 srcImage.currentLayout(),
687 dstImage.resource(),
688 dstImage.image(),
689 dstImage.currentLayout(),
690 blitRegionCount,
691 blitRegions,
692 filter);
693}
694
695
// Records a vkCmdCopyImageToBuffer (image readback into a transfer buffer).
// Must be called outside of a render pass. Both the image and the buffer are
// ref'd so they survive until the GPU finishes this command buffer.
void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 GrVkTransferBuffer* dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        dstBuffer->buffer(),
                                                        copyRegionCount,
                                                        copyRegions));
}
714
// Records a vkCmdCopyBufferToImage (upload from a transfer buffer into an
// image). Must be called outside of a render pass. Both resources are ref'd
// until the GPU finishes this command buffer.
void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 GrVkTransferBuffer* srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcBuffer->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer->buffer(),
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
}
733
Greg Daniel6888c0d2017-08-25 11:55:50 -0400734
// Records a vkCmdCopyBuffer between two GrVkBuffers. Must be called outside
// of a render pass. In debug builds, validates up front that every region is
// non-empty and lies fully inside both buffers (these are valid-usage
// requirements of vkCmdCopyBuffer). Both buffers are ref'd until the GPU
// finishes this command buffer.
void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          GrVkBuffer* srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    // Each region must start inside its buffer and end at or before the end.
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    this->addResource(srcBuffer->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcBuffer->buffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
}
761
// Records a vkCmdUpdateBuffer, which inlines up to 64KB of data directly into
// the command buffer. Must be called outside of a render pass. Vulkan
// requires the offset and size to be 4-byte aligned and dataSize <= 65536.
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);  // vkCmdUpdateBuffer is limited to 64KB per call
    SkASSERT(0 == (dataSize & 0x03));  // four byte aligned
    this->addingWork(gpu);
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}
781
// Records a vkCmdClearColorImage over the given subresource ranges, using the
// image's currently tracked layout. Must be called outside of a render pass.
// The image is ref'd until the GPU finishes this command buffer.
void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}
798
799void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
800 GrVkImage* image,
801 const VkClearDepthStencilValue* color,
802 uint32_t subRangeCount,
803 const VkImageSubresourceRange* subRanges) {
804 SkASSERT(fIsActive);
805 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400806 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700807 this->addResource(image->resource());
808 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
809 image->image(),
810 image->currentLayout(),
811 color,
812 subRangeCount,
813 subRanges));
814}
815
// Records a vkCmdResolveImage (multisample resolve from srcImage into
// dstImage), using each image's currently tracked layout. Must be called
// outside of a render pass. Both images are ref'd until the GPU finishes
// this command buffer.
void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}
836
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500837void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
egdaniel9cb63402016-06-23 08:37:05 -0700838 SkASSERT(!fActiveRenderPass);
839 // Destroy the fence, if any
840 if (VK_NULL_HANDLE != fSubmitFence) {
841 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
842 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500843 SkASSERT(!fSecondaryCommandBuffers.count());
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500844}
845
// Abandons (rather than frees) GPU-side state — presumably invoked when the
// Vulkan device/context has been lost, so no Vulkan calls are made here; the
// abandon is simply propagated to each attached secondary command buffer.
void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
    SkASSERT(!fActiveRenderPass);
    for (const auto& buffer : fSecondaryCommandBuffers) {
        buffer->abandonGPUData();
    }
}
852
///////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
///////////////////////////////////////////////////////////////////////////////
856
// Allocates a new secondary command buffer out of cmdPool and wraps it in a
// GrVkSecondaryCommandBuffer that Skia owns (isWrapped == false). Returns
// nullptr if the Vulkan allocation fails.
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    // 'false' => not wrapped: this buffer was allocated (and will be begun,
    // ended, and recycled) by Skia itself.
    return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
}
876
// Wraps an externally supplied VkCommandBuffer. The 'true' flag marks it as
// wrapped: begin()/end() skip vkBegin/EndCommandBuffer on it, and recycle()
// deletes the wrapper instead of returning the buffer to a pool, since the
// underlying VkCommandBuffer is owned by the client.
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
}
egdaniel9a6cf802016-06-08 08:22:05 -0700880
// Begins recording this secondary command buffer for execution inside the
// given (compatible) render pass. 'framebuffer' may be null, in which case
// VK_NULL_HANDLE is passed in the inheritance info.
void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);  // must not already be recording
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    // Wrapped buffers are begun/ended by their external owner, so we only
    // record vkBeginCommandBuffer on buffers we allocated ourselves.
    if (!this->isWrapped()) {
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        // RENDER_PASS_CONTINUE: this buffer executes entirely within a render
        // pass. ONE_TIME_SUBMIT: it is re-recorded before any resubmission.
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    }
    fIsActive = true;
}
911
// Finishes recording this secondary command buffer and resets the cached
// dynamic state so nothing stale carries over into the next recording.
void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    // Wrapped buffers are ended by the external owner that supplied them.
    if (!this->isWrapped()) {
        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
Greg Daniel8daf3b72019-07-30 09:57:26 -0400921
Greg Daniel0addbdf2019-11-25 15:03:58 -0500922void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400923 if (this->isWrapped()) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400924 delete this;
925 } else {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500926 cmdPool->recycleSecondaryCommandBuffer(this);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400927 }
928}
929