/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkCommandBuffer.h"

#include "include/core/SkRect.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkFramebuffer.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImage.h"
#include "src/gpu/vk/GrVkImageView.h"
#include "src/gpu/vk/GrVkMeshBuffer.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkPipelineState.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkTransferBuffer.h"
#include "src/gpu/vk/GrVkUtil.h"

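// Drops all cached bind/dynamic state. The cached values are set to sentinels that can
// never match a real bind (null handles, negative sizes/offsets), so the next call that
// consults the cache is guaranteed to re-issue the corresponding Vulkan command.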
void GrVkCommandBuffer::invalidateState() {
    for (auto& boundInputBuffer : fBoundInputBuffers) {
        boundInputBuffer = VK_NULL_HANDLE;
    }
    fBoundIndexBuffer = VK_NULL_HANDLE;

    memset(&fCachedViewport, 0, sizeof(VkViewport));
    fCachedViewport.width = -1.0f; // Viewport must have a width greater than 0

    memset(&fCachedScissor, 0, sizeof(VkRect2D));
    fCachedScissor.offset.x = -1; // Scissor offset must be greater than or equal to 0 to be valid

    for (int i = 0; i < 4; ++i) {
        fCachedBlendConstant[i] = -1.0;
    }
}

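// Returns the VkCommandBuffer to its pool. By this point all tracked resources must
// already have been released (see releaseResources()), which the asserts below verify.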
void GrVkCommandBuffer::freeGPUData(const GrGpu* gpu, VkCommandPool cmdPool) const {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    SkASSERT(!fTrackedResources.count());
    SkASSERT(!fTrackedRecycledResources.count());
    SkASSERT(cmdPool != VK_NULL_HANDLE);
    SkASSERT(!this->isWrapped());

    GrVkGpu* vkGpu = (GrVkGpu*)gpu;
    GR_VK_CALL(vkGpu->vkInterface(), FreeCommandBuffers(vkGpu->device(), cmdPool, 1, &fCmdBuffer));

    this->onFreeGPUData(vkGpu);
}

void GrVkCommandBuffer::releaseResources() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyFinishedWithWorkOnGpu();
        fTrackedResources[i]->unref();
    }
    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyFinishedWithWorkOnGpu();
        fTrackedRecycledResources[i]->recycle();
    }

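    // Rewinding keeps the arrays' storage for reuse; the periodic full reset below gives
    // the memory back so one unusually resource-heavy recording does not pin a
    // high-water-mark allocation for the lifetime of the command buffer.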
    if (++fNumResets > kNumRewindResetsBeforeFullReset) {
        fTrackedResources.reset();
        fTrackedRecycledResources.reset();
        fTrackedResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
        fNumResets = 0;
    } else {
        fTrackedResources.rewind();
        fTrackedRecycledResources.rewind();
    }

    this->invalidateState();

    this->onReleaseResources();
}

////////////////////////////////////////////////////////////////////////////////
// CommandBuffer commands
////////////////////////////////////////////////////////////////////////////////

void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
                                        const GrManagedResource* resource,
                                        VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask,
                                        bool byRegion,
                                        BarrierType barrierType,
                                        void* barrier) {
    SkASSERT(!this->isWrapped());
    SkASSERT(fIsActive);
    // For images we can have barriers inside of render passes but they require us to add more
    // support in subpasses which need self dependencies to have barriers inside them. Also, we can
    // never have buffer barriers inside of a render pass. For now we will just assert that we are
    // not in a render pass.
    SkASSERT(!fActiveRenderPass);

    if (barrierType == kBufferMemory_BarrierType) {
        const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
        fBufferBarriers.push_back(*barrierPtr);
    } else {
        SkASSERT(barrierType == kImageMemory_BarrierType);
        const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
        // We need to check if we are adding a pipeline barrier that covers part of the same
        // subresource range as a barrier that is already in the current batch. If it does, then we
        // must submit the first batch because the vulkan spec does not define a specific ordering
        // for barriers submitted in the same batch.
        // TODO: Look if we can gain anything by merging barriers together instead of submitting
        // the old ones.
        for (int i = 0; i < fImageBarriers.count(); ++i) {
            VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
            if (barrierPtr->image == currentBarrier.image) {
                const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
                const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
                SkASSERT(newRange.aspectMask == oldRange.aspectMask);
                SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
                SkASSERT(newRange.layerCount == oldRange.layerCount);
                uint32_t newStart = newRange.baseMipLevel;
                uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
                uint32_t oldStart = oldRange.baseMipLevel;
                uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
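                // The inclusive mip ranges [newStart, newEnd] and [oldStart, oldEnd]
                // intersect exactly when the larger of the two starts is at or before
                // the smaller of the two ends.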
                if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
                    this->submitPipelineBarriers(gpu);
                    break;
                }
            }
        }
        fImageBarriers.push_back(*barrierPtr);
    }
    fBarriersByRegion |= byRegion;

    fSrcStageMask = fSrcStageMask | srcStageMask;
    fDstStageMask = fDstStageMask | dstStageMask;

    fHasWork = true;
    if (resource) {
        this->addResource(resource);
    }
}

void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes but they require us to add more
        // support in subpasses which need self dependencies to have barriers inside them. Also, we
        // can never have buffer barriers inside of a render pass. For now we will just assert that
        // we are not in a render pass.
        SkASSERT(!fActiveRenderPass);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}

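// Vertex and index buffer binds are elided when the same VkBuffer is already bound at
// that slot; the cache is cleared by invalidateState() whenever bound state becomes
// unknown (e.g. after executing a secondary command buffer).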
void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
                                        const GrVkMeshBuffer* vbuffer) {
    VkBuffer vkBuffer = vbuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    SkASSERT(binding < kMaxInputBuffers);
    // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundInputBuffers[binding]) {
        VkDeviceSize offset = vbuffer->offset();
        GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
                                                            binding,
                                                            1,
                                                            &vkBuffer,
                                                            &offset));
        fBoundInputBuffers[binding] = vkBuffer;
        this->addResource(vbuffer->resource());
    }
}

void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkMeshBuffer* ibuffer) {
    VkBuffer vkBuffer = ibuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundIndexBuffer) {
        GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
                                                          vkBuffer,
                                                          ibuffer->offset(),
                                                          VK_INDEX_TYPE_UINT16));
        fBoundIndexBuffer = vkBuffer;
        this->addResource(ibuffer->resource());
    }
}

void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}

void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           GrVkPipelineState* pipelineState,
                                           VkPipelineLayout layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout,
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
}

void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(pipeline);
}

void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}

void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}

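// Indirect draws read their parameters (one VkDrawIndirectCommand or
// VkDrawIndexedIndirectCommand per record) from GPU memory starting at `offset` within
// the indirect buffer, with records spaced `stride` bytes apart, so the CPU never needs
// to know the draw arguments.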
void GrVkCommandBuffer::drawIndirect(const GrVkGpu* gpu,
                                     const GrVkMeshBuffer* indirectBuffer,
                                     VkDeviceSize offset,
                                     uint32_t drawCount,
                                     uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndirect(fCmdBuffer,
                                                   indirectBuffer->buffer(),
                                                   offset,
                                                   drawCount,
                                                   stride));
}

void GrVkCommandBuffer::drawIndexedIndirect(const GrVkGpu* gpu,
                                            const GrVkMeshBuffer* indirectBuffer,
                                            VkDeviceSize offset,
                                            uint32_t drawCount,
                                            uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexedIndirect(fCmdBuffer,
                                                          indirectBuffer->buffer(),
                                                          offset,
                                                          drawCount,
                                                          stride));
}

void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
                                    uint32_t firstViewport,
                                    uint32_t viewportCount,
                                    const VkViewport* viewports) {
    SkASSERT(fIsActive);
    SkASSERT(1 == viewportCount);
    if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
                                                      firstViewport,
                                                      viewportCount,
                                                      viewports));
        fCachedViewport = viewports[0];
    }
}

void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
                                   uint32_t firstScissor,
                                   uint32_t scissorCount,
                                   const VkRect2D* scissors) {
    SkASSERT(fIsActive);
    SkASSERT(1 == scissorCount);
    if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
                                                     firstScissor,
                                                     scissorCount,
                                                     scissors));
        fCachedScissor = scissors[0];
    }
}

void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
                                          const float blendConstants[4]) {
    SkASSERT(fIsActive);
    if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
        memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
    }
}

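// Every command that records real work funnels through here first, so any batched
// pipeline barriers are flushed into the command buffer ahead of the work they guard.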
void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}

#ifdef SK_DEBUG
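// Debug-only check: returns false if `other` tracks any VkImage-backed resource that
// this command buffer also tracks (directly or via its recycled list).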
bool GrVkCommandBuffer::validateNoSharedImageResources(const GrVkCommandBuffer* other) {
    auto resourceIsInCommandBuffer = [this](const GrManagedResource* resource) {
        if (!resource->asVkImageResource()) {
            return false;
        }

        for (int i = 0; i < fTrackedResources.count(); ++i) {
            if (resource == fTrackedResources[i]->asVkImageResource()) {
                return true;
            }
        }
        for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
            if (resource == fTrackedRecycledResources[i]->asVkImageResource()) {
                return true;
            }
        }
        return false;
    };

    for (int i = 0; i < other->fTrackedResources.count(); ++i) {
        if (resourceIsInCommandBuffer(other->fTrackedResources[i])) {
            return false;
        }
    }

    for (int i = 0; i < other->fTrackedRecycledResources.count(); ++i) {
        if (resourceIsInCommandBuffer(other->fTrackedRecycledResources[i])) {
            return false;
        }
    }
    return true;
}
#endif

///////////////////////////////////////////////////////////////////////////////
// PrimaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}

GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
                                                           VkCommandPool cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool,                                          // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer);
}

void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    fIsActive = true;
}

void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->submitPipelineBarriers(gpu);

    GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

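// Begins a render pass against the target's framebuffer. The render pass must be
// compatible with the target; the only failure mode (returning false) is the target
// having no framebuffer to render into.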
bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               GrVkRenderTarget* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(*target));

    const GrVkFramebuffer* framebuffer = target->getFramebuffer();
    if (!framebuffer) {
        return false;
    }

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft, bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    target->addResources(*this);
    return true;
}

void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}

void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows a secondary command buffer to be executed on a primary command buffer
    // if the command pools they were created from were created with the same queue family. However,
    // we currently always create them from the same pool.
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}

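// Helper that fills out a VkSubmitInfo and submits the command buffer(s) in a single
// vkQueueSubmit call. For protected contexts a VkProtectedSubmitInfo is chained on
// pNext so the submission executes as protected work.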
static bool submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, QueueSubmit(queue, 1, &submitInfo, fence));
    return result == VK_SUCCESS;
}

bool GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        GR_VK_CALL_RESULT(gpu, err, CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                &fSubmitFence));
        if (err) {
            fSubmitFence = VK_NULL_HANDLE;
            return false;
        }
    } else {
        // This cannot return DEVICE_LOST so we assert we succeeded.
        GR_VK_CALL_RESULT(gpu, err, ResetFences(gpu->device(), 1, &fSubmitFence));
        SkASSERT(err == VK_SUCCESS);
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    bool submitted = false;

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submitted = submit_to_queue(
                gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submitted = submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(),
                                    vkWaitSems.begin(), vkWaitStages.begin(), 1, &fCmdBuffer,
                                    vkSignalSems.count(), vkSignalSems.begin(),
                                    gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
        if (submitted) {
            for (int i = 0; i < signalCount; ++i) {
                signalSemaphores[i]->markAsSignaled();
            }
            for (int i = 0; i < waitCount; ++i) {
                waitSemaphores[i]->markAsWaited();
            }
        }
    }

    if (!submitted) {
        // Destroy the fence or else we will try to wait forever for it to finish.
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
        return false;
    }
    return true;
}

void GrVkPrimaryCommandBuffer::forceSync(GrVkGpu* gpu) {
    SkASSERT(fSubmitFence != VK_NULL_HANDLE);
    GR_VK_CALL_ERRCHECK(gpu, WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
}

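// Polls the submit fence without blocking. DEVICE_LOST is deliberately treated as
// "finished" so cleanup of this command buffer's resources can still proceed.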
bool GrVkPrimaryCommandBuffer::finished(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err;
    GR_VK_CALL_RESULT_NOCHECK(gpu, err, GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
        case VK_ERROR_DEVICE_LOST:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("Got an invalid fence status");
            return false;
    }
}

void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}

void GrVkPrimaryCommandBuffer::onReleaseResources() {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources();
    }
    this->callFinishedProcs();
}

void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
    }
    fSecondaryCommandBuffers.reset();
}

void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrManagedResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrManagedResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkImage& srcImage,
                                         const GrVkImage& dstImage,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    this->blitImage(gpu,
                    srcImage.resource(),
                    srcImage.image(),
                    srcImage.currentLayout(),
                    dstImage.resource(),
                    dstImage.image(),
                    dstImage.currentLayout(),
                    blitRegionCount,
                    blitRegions,
                    filter);
}

void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 GrVkTransferBuffer* dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        dstBuffer->buffer(),
                                                        copyRegionCount,
                                                        copyRegions));
}

void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 GrVkTransferBuffer* srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcBuffer->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer->buffer(),
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
}

void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          GrVkBuffer* srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    this->addResource(srcBuffer->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcBuffer->buffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
}

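// vkCmdUpdateBuffer embeds the data inline in the command buffer itself; the Vulkan
// spec caps it at 65536 bytes and requires four-byte alignment of both the offset and
// the size, which is what the asserts below enforce.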
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03)); // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03)); // four byte aligned
    this->addingWork(gpu);
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}

void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}

void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}

void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}

void GrVkPrimaryCommandBuffer::onFreeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    SkASSERT(!fSecondaryCommandBuffers.count());
}

///////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
}

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
}

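// A secondary command buffer only records inside a render pass, so begin() must be told
// the render pass (and, when known, the framebuffer) it will execute within via
// VkCommandBufferInheritanceInfo. Wrapped buffers were begun externally and are skipped.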
void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    if (!this->isWrapped()) {
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    }
    fIsActive = true;
}

void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    if (!this->isWrapped()) {
        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

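// Wrapped buffers are owned by the client, so only the wrapper object is deleted;
// pool-allocated buffers are returned to their GrVkCommandPool for reuse.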
void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
    if (this->isWrapped()) {
        delete this;
    } else {
        cmdPool->recycleSecondaryCommandBuffer(this);
    }
}