blob: 12504fba1953453d8b1c2ffd635dce17cd23be0f [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
16#include "src/gpu/vk/GrVkIndexBuffer.h"
17#include "src/gpu/vk/GrVkPipeline.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/vk/GrVkPipelineState.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkRenderPass.h"
21#include "src/gpu/vk/GrVkRenderTarget.h"
22#include "src/gpu/vk/GrVkTransferBuffer.h"
23#include "src/gpu/vk/GrVkUtil.h"
24#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050025
26void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060027 for (auto& boundInputBuffer : fBoundInputBuffers) {
28 boundInputBuffer = VK_NULL_HANDLE;
29 }
egdaniel470d77a2016-03-18 12:50:27 -070030 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070031
32 memset(&fCachedViewport, 0, sizeof(VkViewport));
33 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
34
35 memset(&fCachedScissor, 0, sizeof(VkRect2D));
36 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
37
38 for (int i = 0; i < 4; ++i) {
39 fCachedBlendConstant[i] = -1.0;
40 }
Greg Daniel164a9f02016-02-22 09:56:40 -050041}
42
Jim Van Verth3e192162020-03-10 16:23:16 -040043void GrVkCommandBuffer::freeGPUData(GrGpu* gpu, VkCommandPool cmdPool) const {
Brian Salomone39526b2019-06-24 16:35:53 -040044 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Greg Daniel164a9f02016-02-22 09:56:40 -050045 SkASSERT(!fIsActive);
Greg Daniel0addbdf2019-11-25 15:03:58 -050046 SkASSERT(!fTrackedResources.count());
47 SkASSERT(!fTrackedRecycledResources.count());
48 SkASSERT(cmdPool != VK_NULL_HANDLE);
49 SkASSERT(!this->isWrapped());
halcanary9d524f22016-03-29 09:03:52 -070050
Jim Van Verth3e192162020-03-10 16:23:16 -040051 GrVkGpu* vkGpu = (GrVkGpu*)gpu;
52 GR_VK_CALL(vkGpu->vkInterface(), FreeCommandBuffers(vkGpu->device(), cmdPool, 1, &fCmdBuffer));
egdaniel9cb63402016-06-23 08:37:05 -070053
Jim Van Verth3e192162020-03-10 16:23:16 -040054 this->onFreeGPUData(vkGpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050055}
56
Ethan Nicholas8e265a72018-12-12 16:22:40 -050057void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
Brian Salomone39526b2019-06-24 16:35:53 -040058 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Ethan Nicholas8e265a72018-12-12 16:22:40 -050059 SkDEBUGCODE(fResourcesReleased = true;)
jvanverth7ec92412016-07-06 09:24:57 -070060 SkASSERT(!fIsActive);
61 for (int i = 0; i < fTrackedResources.count(); ++i) {
Jim Van Verth3e192162020-03-10 16:23:16 -040062 fTrackedResources[i]->notifyFinishedWithWorkOnGpu();
jvanverth7ec92412016-07-06 09:24:57 -070063 fTrackedResources[i]->unref(gpu);
64 }
egdanielc1be9bc2016-07-20 08:33:00 -070065 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Jim Van Verth3e192162020-03-10 16:23:16 -040066 fTrackedRecycledResources[i]->notifyFinishedWithWorkOnGpu();
egdanielc1be9bc2016-07-20 08:33:00 -070067 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
68 }
egdaniel594739c2016-09-20 12:39:25 -070069
70 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
71 fTrackedResources.reset();
72 fTrackedRecycledResources.reset();
73 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
74 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
75 fNumResets = 0;
76 } else {
77 fTrackedResources.rewind();
78 fTrackedRecycledResources.rewind();
79 }
80
jvanverth7ec92412016-07-06 09:24:57 -070081 this->invalidateState();
82
Ethan Nicholas8e265a72018-12-12 16:22:40 -050083 this->onReleaseResources(gpu);
jvanverth7ec92412016-07-06 09:24:57 -070084}
85
Greg Daniel164a9f02016-02-22 09:56:40 -050086////////////////////////////////////////////////////////////////////////////////
87// CommandBuffer commands
88////////////////////////////////////////////////////////////////////////////////
89
90void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Jim Van Verth3e192162020-03-10 16:23:16 -040091 const GrManagedResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -050092 VkPipelineStageFlags srcStageMask,
93 VkPipelineStageFlags dstStageMask,
94 bool byRegion,
95 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -050096 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -050097 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -050098 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -070099 // For images we can have barriers inside of render passes but they require us to add more
100 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
101 // never have buffer barriers inside of a render pass. For now we will just assert that we are
102 // not in a render pass.
103 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400104
Greg Danielee54f232019-04-03 14:58:40 -0400105 if (barrierType == kBufferMemory_BarrierType) {
106 const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
107 fBufferBarriers.push_back(*barrierPtr);
108 } else {
109 SkASSERT(barrierType == kImageMemory_BarrierType);
110 const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
Greg Daniel212ff052019-04-09 10:41:34 -0400111 // We need to check if we are adding a pipeline barrier that covers part of the same
112 // subresource range as a barrier that is already in current batch. If it does, then we must
113 // submit the first batch because the vulkan spec does not define a specific ordering for
114 // barriers submitted in the same batch.
115 // TODO: Look if we can gain anything by merging barriers together instead of submitting
116 // the old ones.
117 for (int i = 0; i < fImageBarriers.count(); ++i) {
118 VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
119 if (barrierPtr->image == currentBarrier.image) {
120 const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
121 const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
122 SkASSERT(newRange.aspectMask == oldRange.aspectMask);
123 SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
124 SkASSERT(newRange.layerCount == oldRange.layerCount);
125 uint32_t newStart = newRange.baseMipLevel;
126 uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
127 uint32_t oldStart = oldRange.baseMipLevel;
128 uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
Brian Osman788b9162020-02-07 10:36:46 -0500129 if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
Greg Daniel212ff052019-04-09 10:41:34 -0400130 this->submitPipelineBarriers(gpu);
131 break;
132 }
133 }
134 }
Greg Danielee54f232019-04-03 14:58:40 -0400135 fImageBarriers.push_back(*barrierPtr);
Greg Daniel164a9f02016-02-22 09:56:40 -0500136 }
Greg Danielee54f232019-04-03 14:58:40 -0400137 fBarriersByRegion |= byRegion;
138
139 fSrcStageMask = fSrcStageMask | srcStageMask;
140 fDstStageMask = fDstStageMask | dstStageMask;
141
142 fHasWork = true;
Greg Daniel59dc1482019-02-22 10:46:38 -0500143 if (resource) {
144 this->addResource(resource);
145 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500146}
147
// Flushes all batched buffer/image memory barriers into one
// vkCmdPipelineBarrier call, then clears the batch state. No-op when the
// batch is empty.
void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes but they require us to add more
        // support in subpasses which need self dependencies to have barriers inside them. Also, we
        // can never have buffer barriers inside of a render pass. For now we will just assert that
        // we are not in a render pass.
        SkASSERT(!fActiveRenderPass);
        SkASSERT(!this->isWrapped());
        // A non-empty batch must have accumulated at least one src and dst stage.
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        // Reset batch state so the next pipelineBarrier() starts a new batch.
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    // Postcondition: the batch is empty whether or not anything was submitted.
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}
178
179
Greg Daniel6ecc9112017-06-16 16:17:03 +0000180void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
181 const GrVkVertexBuffer* vbuffer) {
182 VkBuffer vkBuffer = vbuffer->buffer();
183 SkASSERT(VK_NULL_HANDLE != vkBuffer);
184 SkASSERT(binding < kMaxInputBuffers);
185 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
186 // to know if we can skip binding or not.
187 if (vkBuffer != fBoundInputBuffers[binding]) {
188 VkDeviceSize offset = vbuffer->offset();
189 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
190 binding,
191 1,
192 &vkBuffer,
193 &offset));
194 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500195 this->addResource(vbuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000196 }
197}
198
199void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
200 VkBuffer vkBuffer = ibuffer->buffer();
201 SkASSERT(VK_NULL_HANDLE != vkBuffer);
202 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
203 // to know if we can skip binding or not.
204 if (vkBuffer != fBoundIndexBuffer) {
205 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
206 vkBuffer,
207 ibuffer->offset(),
208 VK_INDEX_TYPE_UINT16));
209 fBoundIndexBuffer = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500210 this->addResource(ibuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000211 }
212}
213
// Records vkCmdClearAttachments for the given attachments/rects. Must be
// called inside an active render pass.
void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    // Flush any pending pipeline barriers before recording real work.
    this->addingWork(gpu);

#ifdef SK_DEBUG
    // Verify each color clear targets the render pass's color attachment index.
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    // Some drivers lose bound state after a clear; drop our caches so
    // subsequent binds are re-recorded (see the caps bit).
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}
244
// Records vkCmdBindDescriptorSets for the graphics bind point.
// `pipelineState` is currently unused here beyond keeping the signature; the
// sets are bound against `layout` starting at `firstSet`.
void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           GrVkPipelineState* pipelineState,
                                           VkPipelineLayout layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout,
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
}
263
// Binds a graphics pipeline and tracks it so it stays alive until the
// command buffer's work completes on the GPU.
void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(pipeline);
}
271
// Records vkCmdDrawIndexed. Must be inside an active render pass; flushes
// pending barriers first via addingWork().
void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}
288
// Records vkCmdDraw. Must be inside an active render pass; flushes pending
// barriers first via addingWork().
void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}
egdaniel470d77a2016-03-18 12:50:27 -0700303
304void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
305 uint32_t firstViewport,
306 uint32_t viewportCount,
307 const VkViewport* viewports) {
308 SkASSERT(fIsActive);
309 SkASSERT(1 == viewportCount);
310 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
311 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
312 firstViewport,
313 viewportCount,
314 viewports));
315 fCachedViewport = viewports[0];
316 }
317}
318
319void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
320 uint32_t firstScissor,
321 uint32_t scissorCount,
322 const VkRect2D* scissors) {
323 SkASSERT(fIsActive);
324 SkASSERT(1 == scissorCount);
325 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
326 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
327 firstScissor,
328 scissorCount,
329 scissors));
330 fCachedScissor = scissors[0];
331 }
332}
333
334void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
335 const float blendConstants[4]) {
336 SkASSERT(fIsActive);
337 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
338 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
339 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
340 }
341}
egdaniel9a6cf802016-06-08 08:22:05 -0700342
// Called before recording any real command: flushes the batched pipeline
// barriers (so they precede the work) and marks the buffer as having work.
void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}
347
egdaniel9a6cf802016-06-08 08:22:05 -0700348///////////////////////////////////////////////////////////////////////////////
349// PrimaryCommandBuffer
350////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}
355
// Allocates one primary-level VkCommandBuffer from `cmdPool` and wraps it.
// Returns nullptr if the Vulkan allocation fails.
GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
                                                           VkCommandPool cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool,                                          // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer);
}
374
Greg Daniele643da62019-11-05 12:36:42 -0500375void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700376 SkASSERT(!fIsActive);
377 VkCommandBufferBeginInfo cmdBufferBeginInfo;
378 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
379 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
380 cmdBufferBeginInfo.pNext = nullptr;
381 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
382 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
383
Greg Daniele643da62019-11-05 12:36:42 -0500384 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
egdaniel9a6cf802016-06-08 08:22:05 -0700385 fIsActive = true;
386}
387
// Finishes recording: flushes any still-batched barriers, ends the command
// buffer, and resets cached state/flags.
void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    // Any barriers still pending must be recorded before EndCommandBuffer.
    this->submitPipelineBarriers(gpu);

    GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
399
// Begins `renderPass` against `target`'s framebuffer over `bounds`. Returns
// false (recording nothing) if the target has no framebuffer. When
// `forSecondaryCB` is true the pass contents come from secondary command
// buffers instead of inline commands.
bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               GrVkRenderTarget* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(*target));

    const GrVkFramebuffer* framebuffer = target->getFramebuffer();
    if (!framebuffer) {
        return false;
    }

    // Flush batched barriers before entering the pass (barriers are not
    // allowed inside it; see pipelineBarrier()).
    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft , bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    // Keep the render pass and the target's attachments alive for the GPU.
    this->addResource(renderPass);
    target->addResources(*this);
    return true;
}
440
// Ends the currently active render pass.
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
448
// Executes a finished secondary command buffer inside the active render pass
// and takes ownership of it until resources are released.
void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // if the command pools both were created from were created with the same queue family. However,
    // we currently always create them from the same pool.
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}
467
// File-local helper: builds a VkSubmitInfo (optionally chained to a
// VkProtectedSubmitInfo for protected contexts) and submits it to `queue`
// with `fence`. Returns true iff vkQueueSubmit returned VK_SUCCESS.
static bool submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    // Must outlive the QueueSubmit call below since submitInfo.pNext may
    // point at it.
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, QueueSubmit(queue, 1, &submitInfo, fence));
    return result == VK_SUCCESS;
}
502
// Submits this command buffer to `queue`, waiting on and signaling the given
// semaphores, with fSubmitFence signaled on completion. Creates the fence on
// first use and resets it otherwise. On a failed submit the fence is
// destroyed (so finished() won't wait forever) and false is returned.
bool GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        // First submission: create the fence we will signal on completion.
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        GR_VK_CALL_RESULT(gpu, err, CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                &fSubmitFence));
        if (err) {
            fSubmitFence = VK_NULL_HANDLE;
            return false;
        }
    } else {
        // This cannot return DEVICE_LOST so we assert we succeeded.
        GR_VK_CALL_RESULT(gpu, err, ResetFences(gpu->device(), 1, &fSubmitFence));
        SkASSERT(err == VK_SUCCESS);
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    bool submitted = false;

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submitted = submit_to_queue(
                gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        // Only include semaphores that still need to signal/wait; track each
        // one so it stays alive until the GPU is done with it.
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submitted = submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(),
                                    vkWaitSems.begin(), vkWaitStages.begin(), 1, &fCmdBuffer,
                                    vkSignalSems.count(), vkSignalSems.begin(),
                                    gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
        if (submitted) {
            // Only mark semaphore state changes after a successful submit.
            for (int i = 0; i < signalCount; ++i) {
                signalSemaphores[i]->markAsSignaled();
            }
            for (int i = 0; i < waitCount; ++i) {
                waitSemaphores[i]->markAsWaited();
            }
        }
    }

    if (!submitted) {
        // Destroy the fence or else we will try to wait forever for it to finish.
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
        return false;
    }
    return true;
}
578
// Blocks the CPU until this command buffer's submit fence signals.
void GrVkPrimaryCommandBuffer::forceSync(GrVkGpu* gpu) {
    SkASSERT(fSubmitFence != VK_NULL_HANDLE);
    GR_VK_CALL_ERRCHECK(gpu, WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
}
583
// Returns true once the GPU has finished this command buffer's work (or if
// it was never submitted / the device was lost -- both count as "done" so
// callers don't wait forever).
bool GrVkPrimaryCommandBuffer::finished(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    // No fence means nothing was submitted; treat as finished.
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err;
    GR_VK_CALL_RESULT_NOCHECK(gpu, err, GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
        case VK_ERROR_DEVICE_LOST:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("Got an invalid fence status");
            return false;
    }
}
606
// Registers a callback to be invoked when this command buffer's work
// finishes (the list is cleared in onReleaseResources).
void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}
610
// Releases resources held by every owned secondary command buffer and drops
// the finished-proc callbacks (dropping the refs fires them).
void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources(gpu);
    }
    fFinishedProcs.reset();
}
617
// Hands every owned secondary command buffer back to `cmdPool` for reuse.
// Ownership transfers via release(); recycle() is responsible for the object.
void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
    }
    fSecondaryCommandBuffers.reset();
}
624
// Records vkCmdCopyImage between two images (outside any render pass) and
// tracks both images' resources for GPU lifetime.
void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}
645
// Records vkCmdBlitImage using raw Vulkan handles; the caller supplies the
// managed resources to keep alive for the duration of the GPU work.
void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrManagedResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrManagedResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}
670
Greg Daniel6ecc9112017-06-16 16:17:03 +0000671void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
672 const GrVkImage& srcImage,
673 const GrVkImage& dstImage,
674 uint32_t blitRegionCount,
675 const VkImageBlit* blitRegions,
676 VkFilter filter) {
677 this->blitImage(gpu,
678 srcImage.resource(),
679 srcImage.image(),
680 srcImage.currentLayout(),
681 dstImage.resource(),
682 dstImage.image(),
683 dstImage.currentLayout(),
684 blitRegionCount,
685 blitRegions,
686 filter);
687}
688
689
egdaniel9a6cf802016-06-08 08:22:05 -0700690void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
691 GrVkImage* srcImage,
692 VkImageLayout srcLayout,
693 GrVkTransferBuffer* dstBuffer,
694 uint32_t copyRegionCount,
695 const VkBufferImageCopy* copyRegions) {
696 SkASSERT(fIsActive);
697 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400698 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700699 this->addResource(srcImage->resource());
700 this->addResource(dstBuffer->resource());
701 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
702 srcImage->image(),
703 srcLayout,
704 dstBuffer->buffer(),
705 copyRegionCount,
706 copyRegions));
707}
708
709void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
710 GrVkTransferBuffer* srcBuffer,
711 GrVkImage* dstImage,
712 VkImageLayout dstLayout,
713 uint32_t copyRegionCount,
714 const VkBufferImageCopy* copyRegions) {
715 SkASSERT(fIsActive);
716 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400717 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700718 this->addResource(srcBuffer->resource());
719 this->addResource(dstImage->resource());
720 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
721 srcBuffer->buffer(),
722 dstImage->image(),
723 dstLayout,
724 copyRegionCount,
725 copyRegions));
726}
727
Greg Daniel6888c0d2017-08-25 11:55:50 -0400728
// Records a buffer-to-buffer copy (vkCmdCopyBuffer). Must be recorded outside
// of a render pass. In debug builds, verifies every region is non-empty and
// lies fully within both the source and destination buffers.
void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          GrVkBuffer* srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    // Validate each copy region against the bounds of both buffers.
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    // Ref both buffers so they stay alive until the GPU finishes this buffer.
    this->addResource(srcBuffer->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcBuffer->buffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
}
755
// Records an inline buffer update (vkCmdUpdateBuffer). Must be recorded outside
// of a render pass. The asserts mirror the Vulkan requirements for this
// command: dstOffset and dataSize must be multiples of 4, and dataSize may not
// exceed 65536 bytes.
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));  // four byte aligned
    this->addingWork(gpu);
    // Keep the destination buffer alive until the GPU finishes this buffer.
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}
775
egdaniel9a6cf802016-06-08 08:22:05 -0700776void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
777 GrVkImage* image,
778 const VkClearColorValue* color,
779 uint32_t subRangeCount,
780 const VkImageSubresourceRange* subRanges) {
781 SkASSERT(fIsActive);
782 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400783 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700784 this->addResource(image->resource());
785 GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
786 image->image(),
787 image->currentLayout(),
788 color,
789 subRangeCount,
790 subRanges));
791}
792
793void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
794 GrVkImage* image,
795 const VkClearDepthStencilValue* color,
796 uint32_t subRangeCount,
797 const VkImageSubresourceRange* subRanges) {
798 SkASSERT(fIsActive);
799 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400800 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700801 this->addResource(image->resource());
802 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
803 image->image(),
804 image->currentLayout(),
805 color,
806 subRangeCount,
807 subRanges));
808}
809
egdaniel52ad2512016-08-04 12:50:01 -0700810void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
811 const GrVkImage& srcImage,
812 const GrVkImage& dstImage,
813 uint32_t regionCount,
814 const VkImageResolve* regions) {
815 SkASSERT(fIsActive);
816 SkASSERT(!fActiveRenderPass);
817
Greg Danielee54f232019-04-03 14:58:40 -0400818 this->addingWork(gpu);
egdaniel52ad2512016-08-04 12:50:01 -0700819 this->addResource(srcImage.resource());
820 this->addResource(dstImage.resource());
821
822 GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
823 srcImage.image(),
824 srcImage.currentLayout(),
825 dstImage.image(),
826 dstImage.currentLayout(),
827 regionCount,
828 regions));
829}
830
// Frees GPU-side objects owned by this primary command buffer once the GPU is
// done with it. By this point all secondary command buffers must already have
// been recycled back to the command pool.
void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    SkASSERT(!fSecondaryCommandBuffers.count());
}
839
egdaniel9a6cf802016-06-08 08:22:05 -0700840///////////////////////////////////////////////////////////////////////////////
841// SecondaryCommandBuffer
842////////////////////////////////////////////////////////////////////////////////
843
// Allocates a new secondary-level VkCommandBuffer from cmdPool and wraps it in
// a GrVkSecondaryCommandBuffer (not marked as externally wrapped). Returns
// nullptr if the Vulkan allocation fails.
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
}
863
// Wraps an externally-allocated VkCommandBuffer. Wrapped buffers are not owned
// by a GrVkCommandPool, so recycle() deletes them instead of pooling them.
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
}
egdaniel9a6cf802016-06-08 08:22:05 -0700867
// Begins recording this secondary command buffer against compatibleRenderPass.
// framebuffer may be null, in which case VK_NULL_HANDLE is passed in the
// inheritance info. For wrapped buffers (externally owned) no
// vkBeginCommandBuffer is issued; only the active-render-pass bookkeeping is
// updated.
void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    if (!this->isWrapped()) {
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        // RENDER_PASS_CONTINUE: this buffer executes entirely within a render pass.
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    }
    fIsActive = true;
}
898
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500899void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700900 SkASSERT(fIsActive);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500901 if (!this->isWrapped()) {
Greg Daniele643da62019-11-05 12:36:42 -0500902 GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500903 }
egdaniel9a6cf802016-06-08 08:22:05 -0700904 this->invalidateState();
905 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400906 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700907}
Greg Daniel8daf3b72019-07-30 09:57:26 -0400908
Greg Daniel0addbdf2019-11-25 15:03:58 -0500909void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400910 if (this->isWrapped()) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400911 delete this;
912 } else {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500913 cmdPool->recycleSecondaryCommandBuffer(this);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400914 }
915}
916