/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkCommandBuffer.h"

#include "include/core/SkRect.h"
#include "src/gpu/vk/GrVkBuffer2.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkFramebuffer.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImage.h"
#include "src/gpu/vk/GrVkImageView.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkPipelineState.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkUtil.h"

void GrVkCommandBuffer::invalidateState() {
    for (auto& boundInputBuffer : fBoundInputBuffers) {
        boundInputBuffer = VK_NULL_HANDLE;
    }
    fBoundIndexBuffer = VK_NULL_HANDLE;

    memset(&fCachedViewport, 0, sizeof(VkViewport));
    fCachedViewport.width = -1.0f; // Viewport must have a width greater than 0

    memset(&fCachedScissor, 0, sizeof(VkRect2D));
    fCachedScissor.offset.x = -1; // Scissor offset must be non-negative to be valid

    for (int i = 0; i < 4; ++i) {
        fCachedBlendConstant[i] = -1.0;
    }
}

void GrVkCommandBuffer::freeGPUData(const GrGpu* gpu, VkCommandPool cmdPool) const {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    SkASSERT(!fTrackedResources.count());
    SkASSERT(!fTrackedRecycledResources.count());
    SkASSERT(!fTrackedGpuBuffers.count());
    SkASSERT(!fTrackedGpuSurfaces.count());
    SkASSERT(cmdPool != VK_NULL_HANDLE);
    SkASSERT(!this->isWrapped());

    GrVkGpu* vkGpu = (GrVkGpu*)gpu;
    GR_VK_CALL(vkGpu->vkInterface(), FreeCommandBuffers(vkGpu->device(), cmdPool, 1, &fCmdBuffer));

    this->onFreeGPUData(vkGpu);
}

void GrVkCommandBuffer::releaseResources() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyFinishedWithWorkOnGpu();
    }
    fTrackedResources.reset();
    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyFinishedWithWorkOnGpu();
    }
    fTrackedRecycledResources.reset();

    fTrackedGpuBuffers.reset();
    fTrackedGpuSurfaces.reset();

    this->invalidateState();

    this->onReleaseResources();
}

////////////////////////////////////////////////////////////////////////////////
// CommandBuffer commands
////////////////////////////////////////////////////////////////////////////////

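// Queues a memory barrier for later submission rather than recording it immediately; batched
// barriers are flushed in one vkCmdPipelineBarrier call by submitPipelineBarriers(). A minimal
// usage sketch (illustrative only; the access masks, layouts, and subresource range below are
// assumptions, not taken from callers in this file):
//
//     VkImageMemoryBarrier barrier = {};
//     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
//     barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
//     barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
//     barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
//     barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
//     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//     barrier.image = image->image();
//     barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
//     cmdBuffer->pipelineBarrier(gpu, image->resource(),
//                                VK_PIPELINE_STAGE_TRANSFER_BIT,
//                                VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
//                                /*byRegion=*/false, kImageMemory_BarrierType, &barrier);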
void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
                                        const GrManagedResource* resource,
                                        VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask,
                                        bool byRegion,
                                        BarrierType barrierType,
                                        void* barrier) {
    SkASSERT(!this->isWrapped());
    SkASSERT(fIsActive);
#ifdef SK_DEBUG
    // For images we can have barriers inside of render passes, but they require us to add more
    // support in subpasses, which need self-dependencies to have barriers inside them. Also, we
    // can never have buffer barriers inside of a render pass. For now we will just assert that we
    // are not in a render pass.
    bool isValidSubpassBarrier = false;
    if (barrierType == kImageMemory_BarrierType) {
        VkImageMemoryBarrier* imgBarrier = static_cast<VkImageMemoryBarrier*>(barrier);
        isValidSubpassBarrier = (imgBarrier->newLayout == imgBarrier->oldLayout) &&
                                (imgBarrier->srcQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                (imgBarrier->dstQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                byRegion;
    }
    SkASSERT(!fActiveRenderPass || isValidSubpassBarrier);
#endif

    if (barrierType == kBufferMemory_BarrierType) {
        const VkBufferMemoryBarrier* barrierPtr = static_cast<VkBufferMemoryBarrier*>(barrier);
        fBufferBarriers.push_back(*barrierPtr);
    } else {
        SkASSERT(barrierType == kImageMemory_BarrierType);
        const VkImageMemoryBarrier* barrierPtr = static_cast<VkImageMemoryBarrier*>(barrier);
        // We need to check if we are adding a pipeline barrier that covers part of the same
        // subresource range as a barrier already in the current batch. If it does, then we must
        // submit the first batch, because the Vulkan spec does not define a specific ordering for
        // barriers submitted in the same batch.
        // TODO: Look at whether we can gain anything by merging barriers together instead of
        // submitting the old ones.
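        // The check below is a closed-interval overlap test on mip levels. For example (an
        // illustrative sketch, not from the original comments): a pending barrier on mips [2,5]
        // and a new barrier on mips [4,7] of the same image overlap, since
        // std::max(2, 4) = 4 <= std::min(5, 7) = 5, so the pending batch is flushed first.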
        for (int i = 0; i < fImageBarriers.count(); ++i) {
            VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
            if (barrierPtr->image == currentBarrier.image) {
                const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
                const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
                SkASSERT(newRange.aspectMask == oldRange.aspectMask);
                SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
                SkASSERT(newRange.layerCount == oldRange.layerCount);
                uint32_t newStart = newRange.baseMipLevel;
                uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
                uint32_t oldStart = oldRange.baseMipLevel;
                uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
                if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
                    this->submitPipelineBarriers(gpu);
                    break;
                }
            }
        }
        fImageBarriers.push_back(*barrierPtr);
    }
    fBarriersByRegion |= byRegion;
    fSrcStageMask = fSrcStageMask | srcStageMask;
    fDstStageMask = fDstStageMask | dstStageMask;

    fHasWork = true;
    if (resource) {
        this->addResource(resource);
    }
    if (fActiveRenderPass) {
        this->submitPipelineBarriers(gpu, true);
    }
}

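// Flushes all batched barriers into the command buffer in a single vkCmdPipelineBarrier call.
// Note that the batched barriers share the union of all accumulated src/dst stage masks, which
// is conservative (it may synchronize more than each individual barrier required) but correct.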
void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu, bool forSelfDependency) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes, but they require us to add
        // more support in subpasses, which need self-dependencies to have barriers inside them.
        // Also, we can never have buffer barriers inside of a render pass. For now we will just
        // assert that we are not in a render pass.
        SkASSERT(!fActiveRenderPass || forSelfDependency);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}

void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
                                        sk_sp<const GrBuffer> buffer) {
    VkBuffer vkBuffer = static_cast<const GrVkBuffer2*>(buffer.get())->vkBuffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    SkASSERT(binding < kMaxInputBuffers);
    // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundInputBuffers[binding]) {
        VkDeviceSize offset = 0;
        GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
                                                            binding,
                                                            1,
                                                            &vkBuffer,
                                                            &offset));
        fBoundInputBuffers[binding] = vkBuffer;
        this->addGrBuffer(std::move(buffer));
    }
}

void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, sk_sp<const GrBuffer> buffer) {
    VkBuffer vkBuffer = static_cast<const GrVkBuffer2*>(buffer.get())->vkBuffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundIndexBuffer) {
        GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
                                                          vkBuffer, /*offset=*/0,
                                                          VK_INDEX_TYPE_UINT16));
        fBoundIndexBuffer = vkBuffer;
        this->addGrBuffer(std::move(buffer));
    }
}

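// Records a vkCmdClearAttachments into the current render pass. On some drivers this command can
// clobber previously bound state (see the caps bit checked below), so the cached state is
// invalidated afterwards to force subsequent binds to be re-recorded.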
void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}

void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           VkPipelineLayout layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout,
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
}

void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, sk_sp<const GrVkPipeline> pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(std::move(pipeline));
}

void GrVkCommandBuffer::pushConstants(const GrVkGpu* gpu, VkPipelineLayout layout,
                                      VkShaderStageFlags stageFlags, uint32_t offset,
                                      uint32_t size, const void* values) {
    SkASSERT(fIsActive);
    // The Vulkan spec requires that offset and size each be a multiple of 4.
    SkASSERT(!SkToBool(offset & 0x3));
    SkASSERT(!SkToBool(size & 0x3));
    GR_VK_CALL(gpu->vkInterface(), CmdPushConstants(fCmdBuffer,
                                                    layout,
                                                    stageFlags,
                                                    offset,
                                                    size,
                                                    values));
}

void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}

void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}

void GrVkCommandBuffer::drawIndirect(const GrVkGpu* gpu,
                                     sk_sp<const GrBuffer> indirectBuffer,
                                     VkDeviceSize offset,
                                     uint32_t drawCount,
                                     uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    VkBuffer vkBuffer = static_cast<const GrVkBuffer2*>(indirectBuffer.get())->vkBuffer();
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndirect(fCmdBuffer,
                                                   vkBuffer,
                                                   offset,
                                                   drawCount,
                                                   stride));
    this->addGrBuffer(std::move(indirectBuffer));
}

void GrVkCommandBuffer::drawIndexedIndirect(const GrVkGpu* gpu,
                                            sk_sp<const GrBuffer> indirectBuffer,
                                            VkDeviceSize offset,
                                            uint32_t drawCount,
                                            uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    VkBuffer vkBuffer = static_cast<const GrVkBuffer2*>(indirectBuffer.get())->vkBuffer();
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexedIndirect(fCmdBuffer,
                                                          vkBuffer,
                                                          offset,
                                                          drawCount,
                                                          stride));
    this->addGrBuffer(std::move(indirectBuffer));
}

void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
                                    uint32_t firstViewport,
                                    uint32_t viewportCount,
                                    const VkViewport* viewports) {
    SkASSERT(fIsActive);
    SkASSERT(1 == viewportCount);
    if (0 != memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
                                                      firstViewport,
                                                      viewportCount,
                                                      viewports));
        fCachedViewport = viewports[0];
    }
}

void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
                                   uint32_t firstScissor,
                                   uint32_t scissorCount,
                                   const VkRect2D* scissors) {
    SkASSERT(fIsActive);
    SkASSERT(1 == scissorCount);
    if (0 != memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
                                                     firstScissor,
                                                     scissorCount,
                                                     scissors));
        fCachedScissor = scissors[0];
    }
}

void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
                                          const float blendConstants[4]) {
    SkASSERT(fIsActive);
    if (0 != memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
        memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
    }
}

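// Any batched pipeline barriers must be flushed before the work they guard is recorded, so that
// they precede it in the command stream; addingWork() performs that flush and marks the command
// buffer as containing work.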
void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}

////////////////////////////////////////////////////////////////////////////////
// PrimaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}

GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
                                                           VkCommandPool cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool,                                          // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer);
}

void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    fIsActive = true;
}

void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu, bool abandoningBuffer) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    // If we are in the process of abandoning the context, then the GrResourceCache will have
    // freed all resources before destroying the GrVkGpu. When we destroy the GrVkGpu we call end
    // on the command buffer to keep all our state tracking consistent. However, the Vulkan
    // validation layers complain about calling end on a command buffer that contains resources
    // that have already been deleted. The Vulkan API does not require ending a command buffer in
    // order to delete it, so we just skip the Vulkan API calls and update our own state tracking.
    if (!abandoningBuffer) {
        this->submitPipelineBarriers(gpu);

        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               GrVkRenderTarget* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(*target, renderPass->selfDependencyFlags(),
                                      renderPass->loadFromResolve()));

    const GrVkFramebuffer* framebuffer = target->getFramebuffer(*renderPass);
    if (!framebuffer) {
        return false;
    }

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft, bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    target->addResources(*this, *renderPass);
    return true;
}

void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}

void GrVkPrimaryCommandBuffer::nextSubpass(GrVkGpu* gpu, bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;
    GR_VK_CALL(gpu->vkInterface(), CmdNextSubpass(fCmdBuffer, contents));
}

void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // only if the command pools they were each created from use the same queue family. We
    // currently always create them from the same pool, so this holds.
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}

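// Helper that fills out a VkSubmitInfo and submits the command buffers to the queue. When the
// context is protected, a VkProtectedSubmitInfo is chained onto pNext; the struct is declared
// outside the conditional so that it stays alive until vkQueueSubmit returns.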
static bool submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, QueueSubmit(queue, 1, &submitInfo, fence));
    return result == VK_SUCCESS;
}

bool GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        GR_VK_CALL_RESULT(gpu, err, CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                &fSubmitFence));
        if (err) {
            fSubmitFence = VK_NULL_HANDLE;
            return false;
        }
    } else {
        // This cannot return DEVICE_LOST so we assert we succeeded.
        GR_VK_CALL_RESULT(gpu, err, ResetFences(gpu->device(), 1, &fSubmitFence));
        SkASSERT(err == VK_SUCCESS);
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    bool submitted = false;

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores, so we can simply submit it to the
        // queue with no worries.
        submitted = submit_to_queue(
                gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submitted = submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(),
                                    vkWaitSems.begin(), vkWaitStages.begin(), 1, &fCmdBuffer,
                                    vkSignalSems.count(), vkSignalSems.begin(),
                                    gpu->protectedContext() ? GrProtected::kYes
                                                            : GrProtected::kNo);
        if (submitted) {
            for (int i = 0; i < signalCount; ++i) {
                signalSemaphores[i]->markAsSignaled();
            }
            for (int i = 0; i < waitCount; ++i) {
                waitSemaphores[i]->markAsWaited();
            }
        }
    }

    if (!submitted) {
        // Destroy the fence or else we will try to wait forever for it to finish.
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
        return false;
    }
    return true;
}

void GrVkPrimaryCommandBuffer::forceSync(GrVkGpu* gpu) {
    SkASSERT(fSubmitFence != VK_NULL_HANDLE);
    GR_VK_CALL_ERRCHECK(gpu, WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
}

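// Returns true once the GPU has finished with this command buffer. Note that
// VK_ERROR_DEVICE_LOST is treated the same as success so that cleanup can proceed after a lost
// device.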
bool GrVkPrimaryCommandBuffer::finished(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err;
    GR_VK_CALL_RESULT_NOCHECK(gpu, err, GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
        case VK_ERROR_DEVICE_LOST:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("Got an invalid fence status");
            return false;
    }
}

void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}

void GrVkPrimaryCommandBuffer::onReleaseResources() {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources();
    }
    this->callFinishedProcs();
}

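// Hands the secondary command buffers back to the pool for reuse. The release() call transfers
// ownership out of the unique_ptr; from that point the pool is responsible for recycling or
// deleting each buffer.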
void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
    }
    fSecondaryCommandBuffers.reset();
}

void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrManagedResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrManagedResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkImage& srcImage,
                                         const GrVkImage& dstImage,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    this->blitImage(gpu,
                    srcImage.resource(),
                    srcImage.image(),
                    srcImage.currentLayout(),
                    dstImage.resource(),
                    dstImage.image(),
                    dstImage.currentLayout(),
                    blitRegionCount,
                    blitRegions,
                    filter);
}

void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 sk_sp<GrGpuBuffer> dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    GrVkBuffer2* vkBuffer = static_cast<GrVkBuffer2*>(dstBuffer.get());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        vkBuffer->vkBuffer(),
                                                        copyRegionCount,
                                                        copyRegions));
    this->addResource(srcImage->resource());
    this->addGrBuffer(std::move(dstBuffer));
}

void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 VkBuffer srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer,
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
    this->addResource(dstImage->resource());
}

void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          sk_sp<GrGpuBuffer> srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    const GrVkBuffer2* srcVk = static_cast<GrVkBuffer2*>(srcBuffer.get());

    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcVk->vkBuffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
    this->addGrBuffer(std::move(srcBuffer));
    this->addResource(dstBuffer->resource());
}

void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          sk_sp<GrGpuBuffer> srcBuffer,
                                          sk_sp<GrGpuBuffer> dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif

    const GrVkBuffer2* srcVk = static_cast<GrVkBuffer2*>(srcBuffer.get());
    const GrVkBuffer2* dstVk = static_cast<GrVkBuffer2*>(dstBuffer.get());

    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcVk->vkBuffer(),
                                                 dstVk->vkBuffer(),
                                                 regionCount,
                                                 regions));
    this->addGrBuffer(std::move(srcBuffer));
    this->addGrBuffer(std::move(dstBuffer));
}

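// The asserts below mirror Vulkan's documented limits for vkCmdUpdateBuffer: dstOffset and
// dataSize must each be a multiple of 4, and dataSize may not exceed 65536 bytes. Larger uploads
// must go through a staging buffer and copyBuffer() instead.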
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));   // four byte aligned
    this->addingWork(gpu);
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}

void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            sk_sp<GrVkBuffer2> dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));   // four byte aligned
    this->addingWork(gpu);
    GR_VK_CALL(
            gpu->vkInterface(),
            CmdUpdateBuffer(
                    fCmdBuffer, dstBuffer->vkBuffer(), dstOffset, dataSize, (const uint32_t*)data));
    this->addGrBuffer(std::move(dstBuffer));
}

void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}

void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}

void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}

void GrVkPrimaryCommandBuffer::onFreeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    SkASSERT(!fSecondaryCommandBuffers.count());
}

////////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
}

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
}

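// Wrapped command buffers are created around an externally provided VkCommandBuffer, so Skia
// does not control their begin/end lifetime; begin() and end() below skip the
// vkBeginCommandBuffer/vkEndCommandBuffer calls for them and only update our state tracking.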
void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    if (!this->isWrapped()) {
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0;  // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    }
    fIsActive = true;
}

void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    if (!this->isWrapped()) {
        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
    if (this->isWrapped()) {
        delete this;
    } else {
        cmdPool->recycleSecondaryCommandBuffer(this);
    }
}