blob: 2aed1b5e704472aebfbafc0a37e9da5a18fa743b [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
16#include "src/gpu/vk/GrVkIndexBuffer.h"
17#include "src/gpu/vk/GrVkPipeline.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/vk/GrVkPipelineState.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkRenderPass.h"
21#include "src/gpu/vk/GrVkRenderTarget.h"
22#include "src/gpu/vk/GrVkTransferBuffer.h"
23#include "src/gpu/vk/GrVkUtil.h"
24#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050025
26void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060027 for (auto& boundInputBuffer : fBoundInputBuffers) {
28 boundInputBuffer = VK_NULL_HANDLE;
29 }
egdaniel470d77a2016-03-18 12:50:27 -070030 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070031
32 memset(&fCachedViewport, 0, sizeof(VkViewport));
33 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
34
35 memset(&fCachedScissor, 0, sizeof(VkRect2D));
36 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
37
38 for (int i = 0; i < 4; ++i) {
39 fCachedBlendConstant[i] = -1.0;
40 }
Greg Daniel164a9f02016-02-22 09:56:40 -050041}
42
// Releases everything this command buffer holds when the GPU object is being destroyed normally
// (device still valid): unrefs tracked resources, recycles recyclable ones, frees the Vulkan
// command buffer itself (unless wrapped, i.e. not owned by us), then lets the subclass clean up.
void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu) const {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unref(gpu);
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // recycle() wants a non-const GrVkGpu; this method is const so we must cast away const.
        fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
    }

    // A wrapped command buffer was allocated by the client, so only free ones we allocated.
    if (!this->isWrapped()) {
        GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), fCmdPool->vkCommandPool(),
                                                          1, &fCmdBuffer));
    }

    this->onFreeGPUData(gpu);
}
63
// Drops all tracked resources when the Vulkan device has been lost/abandoned: the resources are
// unref'd without touching the device, and no Vulkan free/recycle calls are made.
void GrVkCommandBuffer::abandonGPUData() const {
    SkDEBUGCODE(fResourcesReleased = true;)
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // We don't recycle resources when abandoning them.
        fTrackedRecycledResources[i]->unrefAndAbandon();
    }

    this->onAbandonGPUData();
}
79
// Releases the resources tracked by this (finished, inactive) command buffer so it can be reused.
// Tracked arrays are normally rewound (keeping their allocations); after
// kNumRewindResetsBeforeFullReset rewinds they are fully reset to shed excess capacity.
void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkDEBUGCODE(fResourcesReleased = true;)
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unref(gpu);
    }
    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // recycle() takes a non-const gpu; cast away const to call it.
        fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
    }

    // Periodically do a full reset so a one-time spike in tracked resources doesn't pin a large
    // allocation forever; otherwise rewind to reuse the existing storage.
    if (++fNumResets > kNumRewindResetsBeforeFullReset) {
        fTrackedResources.reset();
        fTrackedRecycledResources.reset();
        fTrackedResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
        fNumResets = 0;
    } else {
        fTrackedResources.rewind();
        fTrackedRecycledResources.rewind();
    }

    this->invalidateState();

    this->onReleaseResources(gpu);
}
108
Greg Daniel164a9f02016-02-22 09:56:40 -0500109////////////////////////////////////////////////////////////////////////////////
110// CommandBuffer commands
111////////////////////////////////////////////////////////////////////////////////
112
113void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Greg Daniel59dc1482019-02-22 10:46:38 -0500114 const GrVkResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -0500115 VkPipelineStageFlags srcStageMask,
116 VkPipelineStageFlags dstStageMask,
117 bool byRegion,
118 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -0500119 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500120 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -0500121 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -0700122 // For images we can have barriers inside of render passes but they require us to add more
123 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
124 // never have buffer barriers inside of a render pass. For now we will just assert that we are
125 // not in a render pass.
126 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400127
Greg Danielee54f232019-04-03 14:58:40 -0400128 if (barrierType == kBufferMemory_BarrierType) {
129 const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
130 fBufferBarriers.push_back(*barrierPtr);
131 } else {
132 SkASSERT(barrierType == kImageMemory_BarrierType);
133 const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
Greg Daniel212ff052019-04-09 10:41:34 -0400134 // We need to check if we are adding a pipeline barrier that covers part of the same
135 // subresource range as a barrier that is already in current batch. If it does, then we must
136 // submit the first batch because the vulkan spec does not define a specific ordering for
137 // barriers submitted in the same batch.
138 // TODO: Look if we can gain anything by merging barriers together instead of submitting
139 // the old ones.
140 for (int i = 0; i < fImageBarriers.count(); ++i) {
141 VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
142 if (barrierPtr->image == currentBarrier.image) {
143 const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
144 const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
145 SkASSERT(newRange.aspectMask == oldRange.aspectMask);
146 SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
147 SkASSERT(newRange.layerCount == oldRange.layerCount);
148 uint32_t newStart = newRange.baseMipLevel;
149 uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
150 uint32_t oldStart = oldRange.baseMipLevel;
151 uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
152 if (SkTMax(newStart, oldStart) <= SkTMin(newEnd, oldEnd)) {
153 this->submitPipelineBarriers(gpu);
154 break;
155 }
156 }
157 }
Greg Danielee54f232019-04-03 14:58:40 -0400158 fImageBarriers.push_back(*barrierPtr);
Greg Daniel164a9f02016-02-22 09:56:40 -0500159 }
Greg Danielee54f232019-04-03 14:58:40 -0400160 fBarriersByRegion |= byRegion;
161
162 fSrcStageMask = fSrcStageMask | srcStageMask;
163 fDstStageMask = fDstStageMask | dstStageMask;
164
165 fHasWork = true;
Greg Daniel59dc1482019-02-22 10:46:38 -0500166 if (resource) {
167 this->addResource(resource);
168 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500169}
170
// Records all batched buffer/image barriers into the command buffer with a single
// vkCmdPipelineBarrier call, then clears the batching state. No-op when nothing is batched.
void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes but they require us to add more
        // support in subpasses which need self dependencies to have barriers inside them. Also, we
        // can never have buffer barriers inside of a render pass. For now we will just assert that
        // we are not in a render pass.
        SkASSERT(!fActiveRenderPass);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    // Whether or not we submitted, all batching state must now be empty.
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}
201
202
Greg Daniel6ecc9112017-06-16 16:17:03 +0000203void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
204 const GrVkVertexBuffer* vbuffer) {
205 VkBuffer vkBuffer = vbuffer->buffer();
206 SkASSERT(VK_NULL_HANDLE != vkBuffer);
207 SkASSERT(binding < kMaxInputBuffers);
208 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
209 // to know if we can skip binding or not.
210 if (vkBuffer != fBoundInputBuffers[binding]) {
211 VkDeviceSize offset = vbuffer->offset();
212 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
213 binding,
214 1,
215 &vkBuffer,
216 &offset));
217 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500218 this->addResource(vbuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000219 }
220}
221
222void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
223 VkBuffer vkBuffer = ibuffer->buffer();
224 SkASSERT(VK_NULL_HANDLE != vkBuffer);
225 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
226 // to know if we can skip binding or not.
227 if (vkBuffer != fBoundIndexBuffer) {
228 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
229 vkBuffer,
230 ibuffer->offset(),
231 VK_INDEX_TYPE_UINT16));
232 fBoundIndexBuffer = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500233 this->addResource(ibuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000234 }
235}
236
// Records a vkCmdClearAttachments for the current render pass. Must be called inside an active
// render pass with at least one attachment and one rect.
void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    // Verify each color attachment index matches the active render pass's color attachment.
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    // Driver-workaround: some devices lose bound state after CmdClearAttachments, so force all
    // cached state to be re-recorded (see the caps bit).
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}
267
268void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
egdaniel22281c12016-03-23 13:49:40 -0700269 GrVkPipelineState* pipelineState,
Greg Danieleecc6872019-07-29 13:21:37 -0400270 VkPipelineLayout layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500271 uint32_t firstSet,
272 uint32_t setCount,
273 const VkDescriptorSet* descriptorSets,
274 uint32_t dynamicOffsetCount,
275 const uint32_t* dynamicOffsets) {
276 SkASSERT(fIsActive);
277 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
278 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Danieleecc6872019-07-29 13:21:37 -0400279 layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500280 firstSet,
281 setCount,
282 descriptorSets,
283 dynamicOffsetCount,
284 dynamicOffsets));
egdanielbc9b2962016-09-27 08:00:53 -0700285}
286
egdaniel470d77a2016-03-18 12:50:27 -0700287void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
288 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700289 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
290 VK_PIPELINE_BIND_POINT_GRAPHICS,
291 pipeline->pipeline()));
egdanielec440992016-09-13 09:54:11 -0700292 this->addResource(pipeline);
egdaniel470d77a2016-03-18 12:50:27 -0700293}
294
Greg Daniel164a9f02016-02-22 09:56:40 -0500295void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
296 uint32_t indexCount,
297 uint32_t instanceCount,
298 uint32_t firstIndex,
299 int32_t vertexOffset,
Greg Danielf346df32019-04-03 14:52:13 -0400300 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500301 SkASSERT(fIsActive);
302 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400303 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500304 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
305 indexCount,
306 instanceCount,
307 firstIndex,
308 vertexOffset,
309 firstInstance));
310}
311
312void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
313 uint32_t vertexCount,
314 uint32_t instanceCount,
315 uint32_t firstVertex,
Greg Danielf346df32019-04-03 14:52:13 -0400316 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500317 SkASSERT(fIsActive);
318 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400319 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500320 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
321 vertexCount,
322 instanceCount,
323 firstVertex,
324 firstInstance));
325}
egdaniel470d77a2016-03-18 12:50:27 -0700326
327void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
328 uint32_t firstViewport,
329 uint32_t viewportCount,
330 const VkViewport* viewports) {
331 SkASSERT(fIsActive);
332 SkASSERT(1 == viewportCount);
333 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
334 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
335 firstViewport,
336 viewportCount,
337 viewports));
338 fCachedViewport = viewports[0];
339 }
340}
341
342void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
343 uint32_t firstScissor,
344 uint32_t scissorCount,
345 const VkRect2D* scissors) {
346 SkASSERT(fIsActive);
347 SkASSERT(1 == scissorCount);
348 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
349 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
350 firstScissor,
351 scissorCount,
352 scissors));
353 fCachedScissor = scissors[0];
354 }
355}
356
357void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
358 const float blendConstants[4]) {
359 SkASSERT(fIsActive);
360 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
361 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
362 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
363 }
364}
egdaniel9a6cf802016-06-08 08:22:05 -0700365
// Called before recording any real command: flushes batched pipeline barriers so they land ahead
// of the new command, and marks the buffer as having work to submit.
void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}
370
egdaniel9a6cf802016-06-08 08:22:05 -0700371///////////////////////////////////////////////////////////////////////////////
372// PrimaryCommandBuffer
373////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}
378
egdaniel9a6cf802016-06-08 08:22:05 -0700379GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500380 GrVkCommandPool* cmdPool) {
egdaniel9a6cf802016-06-08 08:22:05 -0700381 const VkCommandBufferAllocateInfo cmdInfo = {
382 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400383 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500384 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700385 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
386 1 // bufferCount
387 };
388
389 VkCommandBuffer cmdBuffer;
390 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
391 &cmdInfo,
392 &cmdBuffer));
393 if (err) {
394 return nullptr;
395 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500396 return new GrVkPrimaryCommandBuffer(cmdBuffer, cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700397}
398
Greg Daniele643da62019-11-05 12:36:42 -0500399void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700400 SkASSERT(!fIsActive);
401 VkCommandBufferBeginInfo cmdBufferBeginInfo;
402 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
403 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
404 cmdBufferBeginInfo.pNext = nullptr;
405 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
406 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
407
Greg Daniele643da62019-11-05 12:36:42 -0500408 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
egdaniel9a6cf802016-06-08 08:22:05 -0700409 fIsActive = true;
410}
411
// Ends recording: flushes any batched barriers, calls vkEndCommandBuffer, and clears cached
// state so a future begin() starts from a known-clean slate.
void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->submitPipelineBarriers(gpu);

    GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
423
// Begins 'renderPass' targeting 'target'. Returns false (without beginning) if the target has no
// framebuffer. 'clearValues' must contain renderPass->clearValueCount() entries. When
// 'forSecondaryCB' is true the pass contents are recorded via secondary command buffers.
bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               GrVkRenderTarget* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(*target));

    const GrVkFramebuffer* framebuffer = target->getFramebuffer();
    if (!framebuffer) {
        return false;
    }

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft , bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    // Keep the render pass and the target's attachments alive until this buffer finishes.
    this->addResource(renderPass);
    target->addResources(*this);
    return true;
}
464
// Ends the currently active render pass started by beginRenderPass().
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
472
// Executes a finished secondary command buffer inside the active render pass and takes ownership
// of it (stored in fSecondaryCommandBuffers until this primary buffer's resources are recycled).
void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // if the command pools both were created from were created with the same queue family. However,
    // we currently always create them from the same pool.
    SkASSERT(buffer->commandPool() == fCmdPool);
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}
492
// File-local helper: assembles a VkSubmitInfo (chained to a VkProtectedSubmitInfo when the
// context is protected) and submits the command buffers to 'queue', with 'fence' signaled on
// completion. Errors are handled by GR_VK_CALL_ERRCHECK.
static void submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    // Stack-allocated; only initialized (and chained via pNext) for protected submits. It stays
    // in scope through the QueueSubmit call below, as the spec requires.
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    GR_VK_CALL_ERRCHECK(gpu, QueueSubmit(queue, 1, &submitInfo, fence));
}
525
// Submits this finished command buffer to 'queue', creating/resetting fSubmitFence for
// completion tracking. Semaphores that should still signal/wait are collected, ref'd as tracked
// resources, and marked signaled/waited after submission. With kForce_SyncQueue the call blocks
// on the fence, fires finished procs, and destroys the fence.
void GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        GrVkGpu::SyncQueue sync,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    // Lazily create the submit fence on first use; reset it on reuse.
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                         &fSubmitFence));
        SkASSERT(!err);
    } else {
        GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submit_to_queue(gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                        gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        // Only include semaphores that still need to signal; ref each so it outlives the submit.
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        // Likewise only wait on semaphores that still need waiting; all waits use the
        // ALL_COMMANDS stage.
        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(), vkWaitSems.begin(),
                        vkWaitStages.begin(), 1, &fCmdBuffer,
                        vkSignalSems.count(), vkSignalSems.begin(),
                        gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
        // Record that the semaphores have been used so later submits skip them.
        for (int i = 0; i < signalCount; ++i) {
            signalSemaphores[i]->markAsSignaled();
        }
        for (int i = 0; i < waitCount; ++i) {
            waitSemaphores[i]->markAsWaited();
        }
    }

    if (GrVkGpu::kForce_SyncQueue == sync) {
        err = GR_VK_CALL(gpu->vkInterface(),
                         WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
        if (VK_SUCCESS != err) {
            SkDebugf("Fence failed: %d\n", err);
            SK_ABORT("failing");
        }

        // The GPU work is done, so finished callbacks can run now.
        fFinishedProcs.reset();

        // Destroy the fence
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
    }
}
599
Greg Daniela3aa75a2019-04-12 14:24:55 -0400600bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500601 SkASSERT(!fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700602 if (VK_NULL_HANDLE == fSubmitFence) {
603 return true;
604 }
605
606 VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
607 switch (err) {
608 case VK_SUCCESS:
609 return true;
610
611 case VK_NOT_READY:
612 return false;
613
614 default:
615 SkDebugf("Error getting fence status: %d\n", err);
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400616 SK_ABORT("failing");
egdaniel9a6cf802016-06-08 08:22:05 -0700617 break;
618 }
619
620 return false;
621}
622
// Queues a client callback to be invoked once this command buffer's work has finished on the GPU.
void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}
626
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500627void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
jvanverth7ec92412016-07-06 09:24:57 -0700628 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500629 fSecondaryCommandBuffers[i]->releaseResources(gpu);
630 }
Brian Salomonab32f652019-05-10 14:24:50 -0400631 fFinishedProcs.reset();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500632}
633
// Hands each owned secondary command buffer back to its pool for reuse. Ownership is transferred
// out of the unique_ptr via release() because recycle() takes over the raw pointer's lifetime.
void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkGpu* gpu) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fSecondaryCommandBuffers[i]->commandPool() == fCmdPool);
        fSecondaryCommandBuffers[i].release()->recycle(gpu);
    }
    fSecondaryCommandBuffers.reset();
}
641
egdaniel9a6cf802016-06-08 08:22:05 -0700642void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
643 GrVkImage* srcImage,
644 VkImageLayout srcLayout,
645 GrVkImage* dstImage,
646 VkImageLayout dstLayout,
647 uint32_t copyRegionCount,
648 const VkImageCopy* copyRegions) {
649 SkASSERT(fIsActive);
650 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400651 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700652 this->addResource(srcImage->resource());
653 this->addResource(dstImage->resource());
654 GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
655 srcImage->image(),
656 srcLayout,
657 dstImage->image(),
658 dstLayout,
659 copyRegionCount,
660 copyRegions));
661}
662
663void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
664 const GrVkResource* srcResource,
665 VkImage srcImage,
666 VkImageLayout srcLayout,
667 const GrVkResource* dstResource,
668 VkImage dstImage,
669 VkImageLayout dstLayout,
670 uint32_t blitRegionCount,
671 const VkImageBlit* blitRegions,
672 VkFilter filter) {
673 SkASSERT(fIsActive);
674 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400675 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700676 this->addResource(srcResource);
677 this->addResource(dstResource);
678 GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
679 srcImage,
680 srcLayout,
681 dstImage,
682 dstLayout,
683 blitRegionCount,
684 blitRegions,
685 filter));
686}
687
Greg Daniel6ecc9112017-06-16 16:17:03 +0000688void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
689 const GrVkImage& srcImage,
690 const GrVkImage& dstImage,
691 uint32_t blitRegionCount,
692 const VkImageBlit* blitRegions,
693 VkFilter filter) {
694 this->blitImage(gpu,
695 srcImage.resource(),
696 srcImage.image(),
697 srcImage.currentLayout(),
698 dstImage.resource(),
699 dstImage.image(),
700 dstImage.currentLayout(),
701 blitRegionCount,
702 blitRegions,
703 filter);
704}
705
706
egdaniel9a6cf802016-06-08 08:22:05 -0700707void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
708 GrVkImage* srcImage,
709 VkImageLayout srcLayout,
710 GrVkTransferBuffer* dstBuffer,
711 uint32_t copyRegionCount,
712 const VkBufferImageCopy* copyRegions) {
713 SkASSERT(fIsActive);
714 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400715 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700716 this->addResource(srcImage->resource());
717 this->addResource(dstBuffer->resource());
718 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
719 srcImage->image(),
720 srcLayout,
721 dstBuffer->buffer(),
722 copyRegionCount,
723 copyRegions));
724}
725
726void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
727 GrVkTransferBuffer* srcBuffer,
728 GrVkImage* dstImage,
729 VkImageLayout dstLayout,
730 uint32_t copyRegionCount,
731 const VkBufferImageCopy* copyRegions) {
732 SkASSERT(fIsActive);
733 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400734 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700735 this->addResource(srcBuffer->resource());
736 this->addResource(dstImage->resource());
737 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
738 srcBuffer->buffer(),
739 dstImage->image(),
740 dstLayout,
741 copyRegionCount,
742 copyRegions));
743}
744
Greg Daniel6888c0d2017-08-25 11:55:50 -0400745
746void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
747 GrVkBuffer* srcBuffer,
748 GrVkBuffer* dstBuffer,
749 uint32_t regionCount,
750 const VkBufferCopy* regions) {
751 SkASSERT(fIsActive);
752 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400753 this->addingWork(gpu);
Greg Daniel6888c0d2017-08-25 11:55:50 -0400754#ifdef SK_DEBUG
755 for (uint32_t i = 0; i < regionCount; ++i) {
756 const VkBufferCopy& region = regions[i];
757 SkASSERT(region.size > 0);
758 SkASSERT(region.srcOffset < srcBuffer->size());
759 SkASSERT(region.dstOffset < dstBuffer->size());
760 SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
761 SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
762 }
763#endif
764 this->addResource(srcBuffer->resource());
765 this->addResource(dstBuffer->resource());
766 GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
767 srcBuffer->buffer(),
768 dstBuffer->buffer(),
769 regionCount,
770 regions));
771}
772
jvanvertha584de92016-06-30 09:10:52 -0700773void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
774 GrVkBuffer* dstBuffer,
775 VkDeviceSize dstOffset,
776 VkDeviceSize dataSize,
777 const void* data) {
778 SkASSERT(fIsActive);
779 SkASSERT(!fActiveRenderPass);
780 SkASSERT(0 == (dstOffset & 0x03)); // four byte aligned
781 // TODO: handle larger transfer sizes
782 SkASSERT(dataSize <= 65536);
783 SkASSERT(0 == (dataSize & 0x03)); // four byte aligned
Greg Danielee54f232019-04-03 14:58:40 -0400784 this->addingWork(gpu);
jvanvertha584de92016-06-30 09:10:52 -0700785 this->addResource(dstBuffer->resource());
786 GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
787 dstBuffer->buffer(),
788 dstOffset,
789 dataSize,
790 (const uint32_t*) data));
791}
792
egdaniel9a6cf802016-06-08 08:22:05 -0700793void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
794 GrVkImage* image,
795 const VkClearColorValue* color,
796 uint32_t subRangeCount,
797 const VkImageSubresourceRange* subRanges) {
798 SkASSERT(fIsActive);
799 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400800 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700801 this->addResource(image->resource());
802 GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
803 image->image(),
804 image->currentLayout(),
805 color,
806 subRangeCount,
807 subRanges));
808}
809
810void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
811 GrVkImage* image,
812 const VkClearDepthStencilValue* color,
813 uint32_t subRangeCount,
814 const VkImageSubresourceRange* subRanges) {
815 SkASSERT(fIsActive);
816 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400817 this->addingWork(gpu);
egdaniel9a6cf802016-06-08 08:22:05 -0700818 this->addResource(image->resource());
819 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
820 image->image(),
821 image->currentLayout(),
822 color,
823 subRangeCount,
824 subRanges));
825}
826
egdaniel52ad2512016-08-04 12:50:01 -0700827void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
828 const GrVkImage& srcImage,
829 const GrVkImage& dstImage,
830 uint32_t regionCount,
831 const VkImageResolve* regions) {
832 SkASSERT(fIsActive);
833 SkASSERT(!fActiveRenderPass);
834
Greg Danielee54f232019-04-03 14:58:40 -0400835 this->addingWork(gpu);
egdaniel52ad2512016-08-04 12:50:01 -0700836 this->addResource(srcImage.resource());
837 this->addResource(dstImage.resource());
838
839 GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
840 srcImage.image(),
841 srcImage.currentLayout(),
842 dstImage.image(),
843 dstImage.currentLayout(),
844 regionCount,
845 regions));
846}
847
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500848void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
egdaniel9cb63402016-06-23 08:37:05 -0700849 SkASSERT(!fActiveRenderPass);
850 // Destroy the fence, if any
851 if (VK_NULL_HANDLE != fSubmitFence) {
852 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
853 }
Greg Daniel8daf3b72019-07-30 09:57:26 -0400854 for (const auto& buffer : fSecondaryCommandBuffers) {
855 buffer->freeGPUData(gpu);
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500856 }
857}
858
859void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
860 SkASSERT(!fActiveRenderPass);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400861 for (const auto& buffer : fSecondaryCommandBuffers) {
862 buffer->abandonGPUData();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500863 }
egdaniel9cb63402016-06-23 08:37:05 -0700864}
865
egdaniel9a6cf802016-06-08 08:22:05 -0700866///////////////////////////////////////////////////////////////////////////////
867// SecondaryCommandBuffer
868////////////////////////////////////////////////////////////////////////////////
869
jvanverth7ec92412016-07-06 09:24:57 -0700870GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500871 GrVkCommandPool* cmdPool) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500872 SkASSERT(cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700873 const VkCommandBufferAllocateInfo cmdInfo = {
874 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400875 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500876 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700877 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
878 1 // bufferCount
879 };
880
881 VkCommandBuffer cmdBuffer;
882 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
883 &cmdInfo,
884 &cmdBuffer));
885 if (err) {
886 return nullptr;
887 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500888 return new GrVkSecondaryCommandBuffer(cmdBuffer, cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700889}
890
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    // Wraps a command buffer allocated externally. The null command pool marks
    // it as wrapped (see the isWrapped() checks in begin/end/recycle), which
    // skips our begin/end recording calls and the pool-recycling path.
    return new GrVkSecondaryCommandBuffer(cmdBuffer, nullptr);
}
egdaniel9a6cf802016-06-08 08:22:05 -0700894
Greg Daniele643da62019-11-05 12:36:42 -0500895void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
jvanverth7ec92412016-07-06 09:24:57 -0700896 const GrVkRenderPass* compatibleRenderPass) {
egdaniel9a6cf802016-06-08 08:22:05 -0700897 SkASSERT(!fIsActive);
jvanverth7ec92412016-07-06 09:24:57 -0700898 SkASSERT(compatibleRenderPass);
899 fActiveRenderPass = compatibleRenderPass;
egdaniel9a6cf802016-06-08 08:22:05 -0700900
Greg Daniel070cbaf2019-01-03 17:35:54 -0500901 if (!this->isWrapped()) {
902 VkCommandBufferInheritanceInfo inheritanceInfo;
903 memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
904 inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
905 inheritanceInfo.pNext = nullptr;
906 inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
907 inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
908 inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
909 inheritanceInfo.occlusionQueryEnable = false;
910 inheritanceInfo.queryFlags = 0;
911 inheritanceInfo.pipelineStatistics = 0;
egdaniel9a6cf802016-06-08 08:22:05 -0700912
Greg Daniel070cbaf2019-01-03 17:35:54 -0500913 VkCommandBufferBeginInfo cmdBufferBeginInfo;
914 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
915 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
916 cmdBufferBeginInfo.pNext = nullptr;
917 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
918 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
919 cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;
egdaniel9a6cf802016-06-08 08:22:05 -0700920
Greg Daniele643da62019-11-05 12:36:42 -0500921 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500922 }
egdaniel9a6cf802016-06-08 08:22:05 -0700923 fIsActive = true;
924}
925
void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    // Wrapped buffers (created around an externally allocated VkCommandBuffer)
    // are ended by their external owner, so skip vkEndCommandBuffer for them.
    if (!this->isWrapped()) {
        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    // Drop cached bound-buffer/viewport/scissor state and mark us inactive.
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
Greg Daniel8daf3b72019-07-30 09:57:26 -0400935
936void GrVkSecondaryCommandBuffer::recycle(GrVkGpu* gpu) {
937 if (this->isWrapped()) {
938 this->freeGPUData(gpu);
939 delete this;
940 } else {
941 fCmdPool->recycleSecondaryCommandBuffer(this);
942 }
943}
944