blob: 7087379b693535aa6e81c7bf64a393630ca36a8f [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
16#include "src/gpu/vk/GrVkIndexBuffer.h"
17#include "src/gpu/vk/GrVkPipeline.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/vk/GrVkPipelineState.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkRenderPass.h"
21#include "src/gpu/vk/GrVkRenderTarget.h"
22#include "src/gpu/vk/GrVkTransferBuffer.h"
23#include "src/gpu/vk/GrVkUtil.h"
24#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050025
// Resets all cached dynamic/bound state to sentinel values so that the next
// bind/set call is guaranteed to re-issue the corresponding Vulkan command
// instead of being skipped by the redundancy checks.
void GrVkCommandBuffer::invalidateState() {
    for (auto& boundInputBuffer : fBoundInputBuffers) {
        boundInputBuffer = VK_NULL_HANDLE;
    }
    fBoundIndexBuffer = VK_NULL_HANDLE;

    memset(&fCachedViewport, 0, sizeof(VkViewport));
    fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0

    memset(&fCachedScissor, 0, sizeof(VkRect2D));
    fCachedScissor.offset.x = -1; // Scissor offset must be greater than 0 to be valid

    // Valid blend constants are in [0, 1], so -1 can never match a real value.
    for (int i = 0; i < 4; ++i) {
        fCachedBlendConstant[i] = -1.0;
    }
}
42
// Returns this command buffer's VkCommandBuffer to the given pool and runs the
// subclass cleanup hook. All tracked resources must already have been released
// (see releaseResources) and the buffer must not be recording or wrapped.
void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu, VkCommandPool cmdPool) const {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    SkASSERT(!fTrackedResources.count());
    SkASSERT(!fTrackedRecycledResources.count());
    SkASSERT(cmdPool != VK_NULL_HANDLE);
    SkASSERT(!this->isWrapped());

    GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), cmdPool, 1, &fCmdBuffer));

    this->onFreeGPUData(gpu);
}
55
// Drops all tracked resources without touching the (now lost) GPU: each
// resource is notified it is leaving this command buffer and then
// unref'd-and-abandoned rather than freed through the device.
void GrVkCommandBuffer::abandonGPUData() const {
    SkDEBUGCODE(fResourcesReleased = true;)
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // We don't recycle resources when abandoning them.
        fTrackedRecycledResources[i]->unrefAndAbandon();
    }

    this->onAbandonGPUData();
}
71
Ethan Nicholas8e265a72018-12-12 16:22:40 -050072void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
Brian Salomone39526b2019-06-24 16:35:53 -040073 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Ethan Nicholas8e265a72018-12-12 16:22:40 -050074 SkDEBUGCODE(fResourcesReleased = true;)
jvanverth7ec92412016-07-06 09:24:57 -070075 SkASSERT(!fIsActive);
76 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050077 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
jvanverth7ec92412016-07-06 09:24:57 -070078 fTrackedResources[i]->unref(gpu);
79 }
egdanielc1be9bc2016-07-20 08:33:00 -070080 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050081 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070082 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
83 }
egdaniel594739c2016-09-20 12:39:25 -070084
85 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
86 fTrackedResources.reset();
87 fTrackedRecycledResources.reset();
88 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
89 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
90 fNumResets = 0;
91 } else {
92 fTrackedResources.rewind();
93 fTrackedRecycledResources.rewind();
94 }
95
jvanverth7ec92412016-07-06 09:24:57 -070096 this->invalidateState();
97
Ethan Nicholas8e265a72018-12-12 16:22:40 -050098 this->onReleaseResources(gpu);
jvanverth7ec92412016-07-06 09:24:57 -070099}
100
Greg Daniel164a9f02016-02-22 09:56:40 -0500101////////////////////////////////////////////////////////////////////////////////
102// CommandBuffer commands
103////////////////////////////////////////////////////////////////////////////////
104
105void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Greg Daniel59dc1482019-02-22 10:46:38 -0500106 const GrVkResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -0500107 VkPipelineStageFlags srcStageMask,
108 VkPipelineStageFlags dstStageMask,
109 bool byRegion,
110 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -0500111 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500112 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -0500113 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -0700114 // For images we can have barriers inside of render passes but they require us to add more
115 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
116 // never have buffer barriers inside of a render pass. For now we will just assert that we are
117 // not in a render pass.
118 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400119
Greg Danielee54f232019-04-03 14:58:40 -0400120 if (barrierType == kBufferMemory_BarrierType) {
121 const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
122 fBufferBarriers.push_back(*barrierPtr);
123 } else {
124 SkASSERT(barrierType == kImageMemory_BarrierType);
125 const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
Greg Daniel212ff052019-04-09 10:41:34 -0400126 // We need to check if we are adding a pipeline barrier that covers part of the same
127 // subresource range as a barrier that is already in current batch. If it does, then we must
128 // submit the first batch because the vulkan spec does not define a specific ordering for
129 // barriers submitted in the same batch.
130 // TODO: Look if we can gain anything by merging barriers together instead of submitting
131 // the old ones.
132 for (int i = 0; i < fImageBarriers.count(); ++i) {
133 VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
134 if (barrierPtr->image == currentBarrier.image) {
135 const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
136 const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
137 SkASSERT(newRange.aspectMask == oldRange.aspectMask);
138 SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
139 SkASSERT(newRange.layerCount == oldRange.layerCount);
140 uint32_t newStart = newRange.baseMipLevel;
141 uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
142 uint32_t oldStart = oldRange.baseMipLevel;
143 uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
144 if (SkTMax(newStart, oldStart) <= SkTMin(newEnd, oldEnd)) {
145 this->submitPipelineBarriers(gpu);
146 break;
147 }
148 }
149 }
Greg Danielee54f232019-04-03 14:58:40 -0400150 fImageBarriers.push_back(*barrierPtr);
Greg Daniel164a9f02016-02-22 09:56:40 -0500151 }
Greg Danielee54f232019-04-03 14:58:40 -0400152 fBarriersByRegion |= byRegion;
153
154 fSrcStageMask = fSrcStageMask | srcStageMask;
155 fDstStageMask = fDstStageMask | dstStageMask;
156
157 fHasWork = true;
Greg Daniel59dc1482019-02-22 10:46:38 -0500158 if (resource) {
159 this->addResource(resource);
160 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500161}
162
// Flushes the batched buffer/image memory barriers (accumulated by
// pipelineBarrier) into a single vkCmdPipelineBarrier call, then clears the
// batch state. No-op when nothing is pending.
void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes but they require us to add more
        // support in subpasses which need self dependencies to have barriers inside them. Also, we
        // can never have buffer barriers inside of a render pass. For now we will just assert that
        // we are not in a render pass.
        SkASSERT(!fActiveRenderPass);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        // Reset all batch state for the next run of barriers.
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    // Postcondition: whether or not we submitted, nothing may remain pending.
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}
193
194
// Binds a vertex buffer to the given binding slot, skipping the Vulkan call if
// that exact buffer is already bound there. The buffer's resource is ref'd so
// it stays alive until this command buffer finishes.
void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
                                        const GrVkVertexBuffer* vbuffer) {
    VkBuffer vkBuffer = vbuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    SkASSERT(binding < kMaxInputBuffers);
    // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundInputBuffers[binding]) {
        VkDeviceSize offset = vbuffer->offset();
        GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
                                                            binding,
                                                            1,
                                                            &vkBuffer,
                                                            &offset));
        fBoundInputBuffers[binding] = vkBuffer;
        this->addResource(vbuffer->resource());
    }
}
213
// Binds the index buffer (always 16-bit indices), skipping the Vulkan call if
// the same buffer is already bound. Refs the buffer's resource for lifetime.
void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
    VkBuffer vkBuffer = ibuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundIndexBuffer) {
        GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
                                                          vkBuffer,
                                                          ibuffer->offset(),
                                                          VK_INDEX_TYPE_UINT16));
        fBoundIndexBuffer = vkBuffer;
        this->addResource(ibuffer->resource());
    }
}
228
// Records vkCmdClearAttachments for the active render pass. In debug builds,
// verifies each color clear targets the render pass's color attachment index.
// Some drivers require primary command buffer state to be treated as
// invalidated after this call (see the caps check at the end).
void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    // Workaround for drivers that lose bound state across CmdClearAttachments.
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}
259
// Records vkCmdBindDescriptorSets for the graphics bind point.
// NOTE(review): 'pipelineState' is not used in this body — presumably kept for
// interface compatibility with callers/resource tracking elsewhere; confirm.
void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           GrVkPipelineState* pipelineState,
                                           VkPipelineLayout layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout,
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
}
278
// Records vkCmdBindPipeline (graphics bind point) and refs the pipeline so it
// outlives this command buffer's execution.
void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(pipeline);
}
286
// Records an indexed draw. Must be inside an active render pass; flushes any
// pending pipeline barriers first via addingWork().
void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}
303
// Records a non-indexed draw. Must be inside an active render pass; flushes
// any pending pipeline barriers first via addingWork().
void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}
egdaniel470d77a2016-03-18 12:50:27 -0700318
// Sets the dynamic viewport, skipping the Vulkan call when it matches the
// cached value. Only a single viewport is supported.
void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
                                    uint32_t firstViewport,
                                    uint32_t viewportCount,
                                    const VkViewport* viewports) {
    SkASSERT(fIsActive);
    SkASSERT(1 == viewportCount);
    if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
                                                      firstViewport,
                                                      viewportCount,
                                                      viewports));
        fCachedViewport = viewports[0];
    }
}
333
// Sets the dynamic scissor rect, skipping the Vulkan call when it matches the
// cached value. Only a single scissor is supported.
void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
                                   uint32_t firstScissor,
                                   uint32_t scissorCount,
                                   const VkRect2D* scissors) {
    SkASSERT(fIsActive);
    SkASSERT(1 == scissorCount);
    if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
                                                     firstScissor,
                                                     scissorCount,
                                                     scissors));
        fCachedScissor = scissors[0];
    }
}
348
// Sets the dynamic blend constants (RGBA), skipping the Vulkan call when they
// match the cached values.
void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
                                          const float blendConstants[4]) {
    SkASSERT(fIsActive);
    if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
        memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
    }
}
egdaniel9a6cf802016-06-08 08:22:05 -0700357
// Called before recording any real command: flushes batched pipeline barriers
// so they are ordered before the new work, and marks the buffer non-empty.
void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}
362
egdaniel9a6cf802016-06-08 08:22:05 -0700363///////////////////////////////////////////////////////////////////////////////
364// PrimaryCommandBuffer
365////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}
370
// Allocates one primary-level VkCommandBuffer from 'cmdPool' and wraps it.
// Returns nullptr if the Vulkan allocation fails.
GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
                                                           VkCommandPool cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool,                                          // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
                                                                         &cmdInfo,
                                                                         &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer);
}
390
// Begins recording with ONE_TIME_SUBMIT usage (the buffer is re-begun after
// each submit rather than resubmitted). Must not already be recording.
void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    fIsActive = true;
}
403
// Ends recording. Any batched pipeline barriers are flushed first since they
// cannot be emitted after vkEndCommandBuffer; cached state is invalidated for
// the next recording session.
void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->submitPipelineBarriers(gpu);

    GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
415
// Begins 'renderPass' against the target's framebuffer over the given bounds.
// Returns false (recording nothing) if the target has no framebuffer. The
// render pass and the target's resources are ref'd for the buffer's lifetime.
// 'forSecondaryCB' selects whether pass contents come from secondary command
// buffers or are recorded inline.
bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               GrVkRenderTarget* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(*target));

    const GrVkFramebuffer* framebuffer = target->getFramebuffer();
    if (!framebuffer) {
        return false;
    }

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft , bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    target->addResources(*this);
    return true;
}
456
// Ends the currently active render pass; one must be active.
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
464
// Executes a finished secondary command buffer inside the active render pass.
// Takes ownership of 'buffer' (kept until this primary buffer is recycled).
void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // if the command pools both were created from were created with the same queue family. However,
    // we currently always create them from the same pool.
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}
483
// File-local helper: builds a VkSubmitInfo (chaining a VkProtectedSubmitInfo
// when running in a protected context) and submits it to 'queue', signaling
// 'fence' on completion.
static void submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    // Must outlive the QueueSubmit call since submitInfo.pNext may point at it.
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    GR_VK_CALL_ERRCHECK(gpu, QueueSubmit(queue, 1, &submitInfo, fence));
}
516
// Submits this finished command buffer to 'queue', creating or resetting the
// submit fence, wiring up any wait/signal semaphores that still need to
// wait/signal, and — when sync == kForce_SyncQueue — blocking on the fence and
// destroying it before returning.
void GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        GrVkGpu::SyncQueue sync,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    // Lazily create the fence on first submit; reuse (reset) it afterwards.
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                         &fSubmitFence));
        SkASSERT(!err);
    } else {
        GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submit_to_queue(gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                        gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        // Only include semaphores that still need to signal/wait; each one we
        // use is ref'd so it outlives the GPU work.
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(), vkWaitSems.begin(),
                        vkWaitStages.begin(), 1, &fCmdBuffer,
                        vkSignalSems.count(), vkSignalSems.begin(),
                        gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
        // Mark every semaphore (even skipped ones) so later submits don't
        // signal/wait on them again.
        for (int i = 0; i < signalCount; ++i) {
            signalSemaphores[i]->markAsSignaled();
        }
        for (int i = 0; i < waitCount; ++i) {
            waitSemaphores[i]->markAsWaited();
        }
    }

    if (GrVkGpu::kForce_SyncQueue == sync) {
        err = GR_VK_CALL(gpu->vkInterface(),
                         WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
        if (VK_SUCCESS != err) {
            SkDebugf("Fence failed: %d\n", err);
            SK_ABORT("failing");
        }

        fFinishedProcs.reset();

        // Destroy the fence
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
    }
}
590
// Returns true if the GPU has finished executing the last submit (or nothing
// was ever submitted). Non-blocking fence poll; aborts on unexpected errors.
bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    // No fence means this buffer was never submitted (or was force-synced).
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("failing");
            break;
    }

    return false;
}
613
// Registers a callback to be invoked once this buffer's work completes.
void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}
617
// Releases resources held by owned secondary command buffers, then drops the
// finished callbacks (dropping the sk_sp refs invokes them).
void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources(gpu);
    }
    fFinishedProcs.reset();
}
624
// Hands ownership of each secondary command buffer back to 'cmdPool' for
// reuse. release() transfers the raw pointer out of the unique_ptr; recycle()
// takes over its lifetime.
void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
    }
    fSecondaryCommandBuffers.reset();
}
631
// Records vkCmdCopyImage between two images already in the given layouts.
// Must be outside a render pass; both images are ref'd for lifetime.
void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}
652
// Records vkCmdBlitImage using raw Vulkan handles plus the tracking resources
// that keep them alive. Must be outside a render pass.
void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrVkResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}
677
Greg Daniel6ecc9112017-06-16 16:17:03 +0000678void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
679 const GrVkImage& srcImage,
680 const GrVkImage& dstImage,
681 uint32_t blitRegionCount,
682 const VkImageBlit* blitRegions,
683 VkFilter filter) {
684 this->blitImage(gpu,
685 srcImage.resource(),
686 srcImage.image(),
687 srcImage.currentLayout(),
688 dstImage.resource(),
689 dstImage.image(),
690 dstImage.currentLayout(),
691 blitRegionCount,
692 blitRegions,
693 filter);
694}
695
696
egdaniel9a6cf802016-06-08 08:22:05 -0700697void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
698 GrVkImage* srcImage,
699 VkImageLayout srcLayout,
700 GrVkTransferBuffer* dstBuffer,
701 uint32_t copyRegionCount,
702 const VkBufferImageCopy* copyRegions) {
703 SkASSERT(fIsActive);
704 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400705 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700706 this->addResource(srcImage->resource());
707 this->addResource(dstBuffer->resource());
708 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
709 srcImage->image(),
710 srcLayout,
711 dstBuffer->buffer(),
712 copyRegionCount,
713 copyRegions));
714}
715
716void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
717 GrVkTransferBuffer* srcBuffer,
718 GrVkImage* dstImage,
719 VkImageLayout dstLayout,
720 uint32_t copyRegionCount,
721 const VkBufferImageCopy* copyRegions) {
722 SkASSERT(fIsActive);
723 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400724 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700725 this->addResource(srcBuffer->resource());
726 this->addResource(dstImage->resource());
727 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
728 srcBuffer->buffer(),
729 dstImage->image(),
730 dstLayout,
731 copyRegionCount,
732 copyRegions));
733}
734
Greg Daniel6888c0d2017-08-25 11:55:50 -0400735
736void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
737 GrVkBuffer* srcBuffer,
738 GrVkBuffer* dstBuffer,
739 uint32_t regionCount,
740 const VkBufferCopy* regions) {
741 SkASSERT(fIsActive);
742 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400743 this->addingWork(gpu);
Greg Daniel6888c0d2017-08-25 11:55:50 -0400744#ifdef SK_DEBUG
745 for (uint32_t i = 0; i < regionCount; ++i) {
746 const VkBufferCopy& region = regions[i];
747 SkASSERT(region.size > 0);
748 SkASSERT(region.srcOffset < srcBuffer->size());
749 SkASSERT(region.dstOffset < dstBuffer->size());
750 SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
751 SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
752 }
753#endif
754 this->addResource(srcBuffer->resource());
755 this->addResource(dstBuffer->resource());
756 GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
757 srcBuffer->buffer(),
758 dstBuffer->buffer(),
759 regionCount,
760 regions));
761}
762
jvanvertha584de92016-06-30 09:10:52 -0700763void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
764 GrVkBuffer* dstBuffer,
765 VkDeviceSize dstOffset,
766 VkDeviceSize dataSize,
767 const void* data) {
768 SkASSERT(fIsActive);
769 SkASSERT(!fActiveRenderPass);
770 SkASSERT(0 == (dstOffset & 0x03)); // four byte aligned
771 // TODO: handle larger transfer sizes
772 SkASSERT(dataSize <= 65536);
773 SkASSERT(0 == (dataSize & 0x03)); // four byte aligned
Greg Danielee54f232019-04-03 14:58:40 -0400774 this->addingWork(gpu);
jvanvertha584de92016-06-30 09:10:52 -0700775 this->addResource(dstBuffer->resource());
776 GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
777 dstBuffer->buffer(),
778 dstOffset,
779 dataSize,
780 (const uint32_t*) data));
781}
782
egdaniel9a6cf802016-06-08 08:22:05 -0700783void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
784 GrVkImage* image,
785 const VkClearColorValue* color,
786 uint32_t subRangeCount,
787 const VkImageSubresourceRange* subRanges) {
788 SkASSERT(fIsActive);
789 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400790 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700791 this->addResource(image->resource());
792 GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
793 image->image(),
794 image->currentLayout(),
795 color,
796 subRangeCount,
797 subRanges));
798}
799
800void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
801 GrVkImage* image,
802 const VkClearDepthStencilValue* color,
803 uint32_t subRangeCount,
804 const VkImageSubresourceRange* subRanges) {
805 SkASSERT(fIsActive);
806 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400807 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700808 this->addResource(image->resource());
809 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
810 image->image(),
811 image->currentLayout(),
812 color,
813 subRangeCount,
814 subRanges));
815}
816
egdaniel52ad2512016-08-04 12:50:01 -0700817void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
818 const GrVkImage& srcImage,
819 const GrVkImage& dstImage,
820 uint32_t regionCount,
821 const VkImageResolve* regions) {
822 SkASSERT(fIsActive);
823 SkASSERT(!fActiveRenderPass);
824
Greg Danielee54f232019-04-03 14:58:40 -0400825 this->addingWork(gpu);
egdaniel52ad2512016-08-04 12:50:01 -0700826 this->addResource(srcImage.resource());
827 this->addResource(dstImage.resource());
828
829 GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
830 srcImage.image(),
831 srcImage.currentLayout(),
832 dstImage.image(),
833 dstImage.currentLayout(),
834 regionCount,
835 regions));
836}
837
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500838void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
egdaniel9cb63402016-06-23 08:37:05 -0700839 SkASSERT(!fActiveRenderPass);
840 // Destroy the fence, if any
841 if (VK_NULL_HANDLE != fSubmitFence) {
842 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
843 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500844 SkASSERT(!fSecondaryCommandBuffers.count());
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500845}
846
847void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
848 SkASSERT(!fActiveRenderPass);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400849 for (const auto& buffer : fSecondaryCommandBuffers) {
850 buffer->abandonGPUData();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500851 }
egdaniel9cb63402016-06-23 08:37:05 -0700852}
853
egdaniel9a6cf802016-06-08 08:22:05 -0700854///////////////////////////////////////////////////////////////////////////////
855// SecondaryCommandBuffer
856////////////////////////////////////////////////////////////////////////////////
857
jvanverth7ec92412016-07-06 09:24:57 -0700858GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500859 GrVkCommandPool* cmdPool) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500860 SkASSERT(cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700861 const VkCommandBufferAllocateInfo cmdInfo = {
862 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400863 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500864 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700865 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
866 1 // bufferCount
867 };
868
869 VkCommandBuffer cmdBuffer;
870 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
871 &cmdInfo,
872 &cmdBuffer));
873 if (err) {
874 return nullptr;
875 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500876 return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
egdaniel9a6cf802016-06-08 08:22:05 -0700877}
878
Greg Daniel070cbaf2019-01-03 17:35:54 -0500879GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500880 return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500881}
egdaniel9a6cf802016-06-08 08:22:05 -0700882
Greg Daniele643da62019-11-05 12:36:42 -0500883void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
jvanverth7ec92412016-07-06 09:24:57 -0700884 const GrVkRenderPass* compatibleRenderPass) {
egdaniel9a6cf802016-06-08 08:22:05 -0700885 SkASSERT(!fIsActive);
jvanverth7ec92412016-07-06 09:24:57 -0700886 SkASSERT(compatibleRenderPass);
887 fActiveRenderPass = compatibleRenderPass;
egdaniel9a6cf802016-06-08 08:22:05 -0700888
Greg Daniel070cbaf2019-01-03 17:35:54 -0500889 if (!this->isWrapped()) {
890 VkCommandBufferInheritanceInfo inheritanceInfo;
891 memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
892 inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
893 inheritanceInfo.pNext = nullptr;
894 inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
895 inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
896 inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
897 inheritanceInfo.occlusionQueryEnable = false;
898 inheritanceInfo.queryFlags = 0;
899 inheritanceInfo.pipelineStatistics = 0;
egdaniel9a6cf802016-06-08 08:22:05 -0700900
Greg Daniel070cbaf2019-01-03 17:35:54 -0500901 VkCommandBufferBeginInfo cmdBufferBeginInfo;
902 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
903 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
904 cmdBufferBeginInfo.pNext = nullptr;
905 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
906 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
907 cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;
egdaniel9a6cf802016-06-08 08:22:05 -0700908
Greg Daniele643da62019-11-05 12:36:42 -0500909 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500910 }
egdaniel9a6cf802016-06-08 08:22:05 -0700911 fIsActive = true;
912}
913
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500914void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700915 SkASSERT(fIsActive);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500916 if (!this->isWrapped()) {
Greg Daniele643da62019-11-05 12:36:42 -0500917 GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500918 }
egdaniel9a6cf802016-06-08 08:22:05 -0700919 this->invalidateState();
920 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400921 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700922}
Greg Daniel8daf3b72019-07-30 09:57:26 -0400923
Greg Daniel0addbdf2019-11-25 15:03:58 -0500924void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400925 if (this->isWrapped()) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400926 delete this;
927 } else {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500928 cmdPool->recycleSecondaryCommandBuffer(this);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400929 }
930}
931