blob: 2293703b61452f347fa6191e442f12c2694270a9 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
Greg Danield922f332020-04-27 11:21:36 -04002 * Copyright 2015 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
Greg Daniel164a9f02016-02-22 09:56:40 -05007
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
Chris Dalton10ee0b22020-04-02 16:28:52 -060016#include "src/gpu/vk/GrVkMeshBuffer.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050017#include "src/gpu/vk/GrVkPipeline.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/vk/GrVkPipelineState.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkRenderPass.h"
21#include "src/gpu/vk/GrVkRenderTarget.h"
22#include "src/gpu/vk/GrVkTransferBuffer.h"
23#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050024
25void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060026 for (auto& boundInputBuffer : fBoundInputBuffers) {
27 boundInputBuffer = VK_NULL_HANDLE;
28 }
egdaniel470d77a2016-03-18 12:50:27 -070029 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070030
31 memset(&fCachedViewport, 0, sizeof(VkViewport));
32 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
33
34 memset(&fCachedScissor, 0, sizeof(VkRect2D));
35 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
36
37 for (int i = 0; i < 4; ++i) {
38 fCachedBlendConstant[i] = -1.0;
39 }
Greg Daniel164a9f02016-02-22 09:56:40 -050040}
41
Jim Van Verth5082df12020-03-11 16:14:51 -040042void GrVkCommandBuffer::freeGPUData(const GrGpu* gpu, VkCommandPool cmdPool) const {
Brian Salomone39526b2019-06-24 16:35:53 -040043 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
Greg Daniel164a9f02016-02-22 09:56:40 -050044 SkASSERT(!fIsActive);
Greg Daniel0addbdf2019-11-25 15:03:58 -050045 SkASSERT(!fTrackedResources.count());
46 SkASSERT(!fTrackedRecycledResources.count());
Greg Daniela58db7f2020-07-15 09:17:59 -040047 SkASSERT(!fTrackedGpuBuffers.count());
Greg Daniel609e1a92020-12-11 14:18:19 -050048 SkASSERT(!fTrackedGpuSurfaces.count());
Greg Daniel0addbdf2019-11-25 15:03:58 -050049 SkASSERT(cmdPool != VK_NULL_HANDLE);
50 SkASSERT(!this->isWrapped());
halcanary9d524f22016-03-29 09:03:52 -070051
Jim Van Verth3e192162020-03-10 16:23:16 -040052 GrVkGpu* vkGpu = (GrVkGpu*)gpu;
53 GR_VK_CALL(vkGpu->vkInterface(), FreeCommandBuffers(vkGpu->device(), cmdPool, 1, &fCmdBuffer));
egdaniel9cb63402016-06-23 08:37:05 -070054
Jim Van Verth3e192162020-03-10 16:23:16 -040055 this->onFreeGPUData(vkGpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050056}
57
Jim Van Verth5082df12020-03-11 16:14:51 -040058void GrVkCommandBuffer::releaseResources() {
Brian Salomone39526b2019-06-24 16:35:53 -040059 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
jvanverth7ec92412016-07-06 09:24:57 -070060 SkASSERT(!fIsActive);
61 for (int i = 0; i < fTrackedResources.count(); ++i) {
Jim Van Verth3e192162020-03-10 16:23:16 -040062 fTrackedResources[i]->notifyFinishedWithWorkOnGpu();
Jim Van Verth5082df12020-03-11 16:14:51 -040063 fTrackedResources[i]->unref();
jvanverth7ec92412016-07-06 09:24:57 -070064 }
egdanielc1be9bc2016-07-20 08:33:00 -070065 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Jim Van Verth3e192162020-03-10 16:23:16 -040066 fTrackedRecycledResources[i]->notifyFinishedWithWorkOnGpu();
Jim Van Verth5082df12020-03-11 16:14:51 -040067 fTrackedRecycledResources[i]->recycle();
egdanielc1be9bc2016-07-20 08:33:00 -070068 }
egdaniel594739c2016-09-20 12:39:25 -070069
70 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
71 fTrackedResources.reset();
72 fTrackedRecycledResources.reset();
73 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
74 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
75 fNumResets = 0;
76 } else {
77 fTrackedResources.rewind();
78 fTrackedRecycledResources.rewind();
79 }
80
Greg Daniela58db7f2020-07-15 09:17:59 -040081 fTrackedGpuBuffers.reset();
Greg Daniel609e1a92020-12-11 14:18:19 -050082 fTrackedGpuSurfaces.reset();
Greg Daniela58db7f2020-07-15 09:17:59 -040083
jvanverth7ec92412016-07-06 09:24:57 -070084 this->invalidateState();
85
Jim Van Verth5082df12020-03-11 16:14:51 -040086 this->onReleaseResources();
jvanverth7ec92412016-07-06 09:24:57 -070087}
88
Greg Daniel164a9f02016-02-22 09:56:40 -050089////////////////////////////////////////////////////////////////////////////////
90// CommandBuffer commands
91////////////////////////////////////////////////////////////////////////////////
92
// Queues a buffer or image memory barrier into this command buffer's pending
// batch. Barriers are not recorded immediately: they are accumulated and
// flushed as a single vkCmdPipelineBarrier in submitPipelineBarriers() when
// dependent work is added — except inside a render pass, where a (subpass
// self-dependency) barrier must be recorded right away.
// `barrier` points at a VkBufferMemoryBarrier or VkImageMemoryBarrier
// according to `barrierType`.
void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
                                        const GrManagedResource* resource,
                                        VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask,
                                        bool byRegion,
                                        BarrierType barrierType,
                                        void* barrier) {
    SkASSERT(!this->isWrapped());
    SkASSERT(fIsActive);
#ifdef SK_DEBUG
    // For images we can have barriers inside of render passes but they require us to add more
    // support in subpasses which need self dependencies to have barriers inside them. Also, we can
    // never have buffer barriers inside of a render pass. For now we will just assert that we are
    // not in a render pass.
    bool isValidSubpassBarrier = false;
    if (barrierType == kImageMemory_BarrierType) {
        VkImageMemoryBarrier* imgBarrier = static_cast<VkImageMemoryBarrier*>(barrier);
        // A valid subpass self-dependency barrier may not change layout or
        // queue-family ownership and must be by-region.
        isValidSubpassBarrier = (imgBarrier->newLayout == imgBarrier->oldLayout) &&
                                (imgBarrier->srcQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                (imgBarrier->dstQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                byRegion;
    }
    SkASSERT(!fActiveRenderPass || isValidSubpassBarrier);
#endif

    if (barrierType == kBufferMemory_BarrierType) {
        const VkBufferMemoryBarrier* barrierPtr = static_cast<VkBufferMemoryBarrier*>(barrier);
        fBufferBarriers.push_back(*barrierPtr);
    } else {
        SkASSERT(barrierType == kImageMemory_BarrierType);
        const VkImageMemoryBarrier* barrierPtr = static_cast<VkImageMemoryBarrier*>(barrier);
        // We need to check if we are adding a pipeline barrier that covers part of the same
        // subresource range as a barrier that is already in current batch. If it does, then we must
        // submit the first batch because the vulkan spec does not define a specific ordering for
        // barriers submitted in the same batch.
        // TODO: Look if we can gain anything by merging barriers together instead of submitting
        // the old ones.
        for (int i = 0; i < fImageBarriers.count(); ++i) {
            VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
            if (barrierPtr->image == currentBarrier.image) {
                const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
                const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
                SkASSERT(newRange.aspectMask == oldRange.aspectMask);
                SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
                SkASSERT(newRange.layerCount == oldRange.layerCount);
                uint32_t newStart = newRange.baseMipLevel;
                uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
                uint32_t oldStart = oldRange.baseMipLevel;
                uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
                // Inclusive mip-range intersection test: flush the pending
                // batch if the new barrier's mips overlap an existing one's.
                if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
                    this->submitPipelineBarriers(gpu);
                    break;
                }
            }
        }
        fImageBarriers.push_back(*barrierPtr);
    }
    fBarriersByRegion |= byRegion;
    fSrcStageMask = fSrcStageMask | srcStageMask;
    fDstStageMask = fDstStageMask | dstStageMask;

    fHasWork = true;
    if (resource) {
        this->addResource(resource);
    }
    if (fActiveRenderPass) {
        // Inside a render pass the barrier cannot be deferred; record it now
        // as a subpass self-dependency.
        this->submitPipelineBarriers(gpu, true);
    }
}
162
Greg Daniel9a18b082020-08-14 14:03:50 -0400163void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu, bool forSelfDependency) {
Greg Danielee54f232019-04-03 14:58:40 -0400164 SkASSERT(fIsActive);
165
166 // Currently we never submit a pipeline barrier without at least one memory barrier.
167 if (fBufferBarriers.count() || fImageBarriers.count()) {
168 // For images we can have barriers inside of render passes but they require us to add more
169 // support in subpasses which need self dependencies to have barriers inside them. Also, we
170 // can never have buffer barriers inside of a render pass. For now we will just assert that
171 // we are not in a render pass.
Greg Daniel9a18b082020-08-14 14:03:50 -0400172 SkASSERT(!fActiveRenderPass || forSelfDependency);
Greg Danielee54f232019-04-03 14:58:40 -0400173 SkASSERT(!this->isWrapped());
174 SkASSERT(fSrcStageMask && fDstStageMask);
175
176 VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
177 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
178 fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
179 fBufferBarriers.count(), fBufferBarriers.begin(),
180 fImageBarriers.count(), fImageBarriers.begin()));
181 fBufferBarriers.reset();
182 fImageBarriers.reset();
183 fBarriersByRegion = false;
184 fSrcStageMask = 0;
185 fDstStageMask = 0;
186 }
187 SkASSERT(!fBufferBarriers.count());
188 SkASSERT(!fImageBarriers.count());
189 SkASSERT(!fBarriersByRegion);
190 SkASSERT(!fSrcStageMask);
191 SkASSERT(!fDstStageMask);
192}
193
Greg Daniel6ecc9112017-06-16 16:17:03 +0000194void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
Greg Daniel426274b2020-07-20 11:37:38 -0400195 sk_sp<const GrBuffer> buffer) {
196 auto* vkMeshBuffer = static_cast<const GrVkMeshBuffer*>(buffer.get());
197 VkBuffer vkBuffer = vkMeshBuffer->buffer();
Greg Daniel6ecc9112017-06-16 16:17:03 +0000198 SkASSERT(VK_NULL_HANDLE != vkBuffer);
199 SkASSERT(binding < kMaxInputBuffers);
200 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
201 // to know if we can skip binding or not.
202 if (vkBuffer != fBoundInputBuffers[binding]) {
Greg Daniel426274b2020-07-20 11:37:38 -0400203 VkDeviceSize offset = vkMeshBuffer->offset();
Greg Daniel6ecc9112017-06-16 16:17:03 +0000204 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
205 binding,
206 1,
207 &vkBuffer,
208 &offset));
209 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel426274b2020-07-20 11:37:38 -0400210 this->addResource(vkMeshBuffer->resource());
211 this->addGrBuffer(std::move(buffer));
Greg Daniel6ecc9112017-06-16 16:17:03 +0000212 }
213}
214
Greg Daniel426274b2020-07-20 11:37:38 -0400215void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, sk_sp<const GrBuffer> buffer) {
216 auto* vkMeshBuffer = static_cast<const GrVkMeshBuffer*>(buffer.get());
217 VkBuffer vkBuffer = vkMeshBuffer->buffer();
Greg Daniel6ecc9112017-06-16 16:17:03 +0000218 SkASSERT(VK_NULL_HANDLE != vkBuffer);
219 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
220 // to know if we can skip binding or not.
221 if (vkBuffer != fBoundIndexBuffer) {
222 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
Greg Daniel426274b2020-07-20 11:37:38 -0400223 vkBuffer, vkMeshBuffer->offset(),
Greg Daniel6ecc9112017-06-16 16:17:03 +0000224 VK_INDEX_TYPE_UINT16));
225 fBoundIndexBuffer = vkBuffer;
Greg Daniel426274b2020-07-20 11:37:38 -0400226 this->addResource(vkMeshBuffer->resource());
227 this->addGrBuffer(std::move(buffer));
Greg Daniel6ecc9112017-06-16 16:17:03 +0000228 }
229}
230
Greg Daniel164a9f02016-02-22 09:56:40 -0500231void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
232 int numAttachments,
233 const VkClearAttachment* attachments,
234 int numRects,
Greg Danielf346df32019-04-03 14:52:13 -0400235 const VkClearRect* clearRects) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500236 SkASSERT(fIsActive);
237 SkASSERT(fActiveRenderPass);
238 SkASSERT(numAttachments > 0);
239 SkASSERT(numRects > 0);
Greg Danielf346df32019-04-03 14:52:13 -0400240
Greg Danielee54f232019-04-03 14:58:40 -0400241 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400242
Greg Daniel164a9f02016-02-22 09:56:40 -0500243#ifdef SK_DEBUG
244 for (int i = 0; i < numAttachments; ++i) {
245 if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
246 uint32_t testIndex;
247 SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
248 SkASSERT(testIndex == attachments[i].colorAttachment);
249 }
250 }
251#endif
252 GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
253 numAttachments,
254 attachments,
255 numRects,
256 clearRects));
Greg Daniela718a612019-10-07 16:25:41 -0400257 if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
258 this->invalidateState();
259 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500260}
261
262void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
Greg Danieleecc6872019-07-29 13:21:37 -0400263 VkPipelineLayout layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500264 uint32_t firstSet,
265 uint32_t setCount,
266 const VkDescriptorSet* descriptorSets,
267 uint32_t dynamicOffsetCount,
268 const uint32_t* dynamicOffsets) {
269 SkASSERT(fIsActive);
270 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
271 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Danieleecc6872019-07-29 13:21:37 -0400272 layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500273 firstSet,
274 setCount,
275 descriptorSets,
276 dynamicOffsetCount,
277 dynamicOffsets));
egdanielbc9b2962016-09-27 08:00:53 -0700278}
279
Greg Daniel3ef052c2021-01-05 12:20:27 -0500280void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, sk_sp<const GrVkPipeline> pipeline) {
egdaniel470d77a2016-03-18 12:50:27 -0700281 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700282 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
283 VK_PIPELINE_BIND_POINT_GRAPHICS,
284 pipeline->pipeline()));
Greg Daniel3ef052c2021-01-05 12:20:27 -0500285 this->addResource(pipeline.get());
egdaniel470d77a2016-03-18 12:50:27 -0700286}
287
Greg Daniel164a9f02016-02-22 09:56:40 -0500288void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
289 uint32_t indexCount,
290 uint32_t instanceCount,
291 uint32_t firstIndex,
292 int32_t vertexOffset,
Greg Danielf346df32019-04-03 14:52:13 -0400293 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500294 SkASSERT(fIsActive);
295 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400296 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500297 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
298 indexCount,
299 instanceCount,
300 firstIndex,
301 vertexOffset,
302 firstInstance));
303}
304
305void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
306 uint32_t vertexCount,
307 uint32_t instanceCount,
308 uint32_t firstVertex,
Greg Danielf346df32019-04-03 14:52:13 -0400309 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500310 SkASSERT(fIsActive);
311 SkASSERT(fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400312 this->addingWork(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -0500313 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
314 vertexCount,
315 instanceCount,
316 firstVertex,
317 firstInstance));
318}
egdaniel470d77a2016-03-18 12:50:27 -0700319
Chris Dalton03fdf6a2020-04-07 12:31:59 -0600320void GrVkCommandBuffer::drawIndirect(const GrVkGpu* gpu,
321 const GrVkMeshBuffer* indirectBuffer,
322 VkDeviceSize offset,
323 uint32_t drawCount,
324 uint32_t stride) {
325 SkASSERT(fIsActive);
326 SkASSERT(fActiveRenderPass);
327 SkASSERT(!indirectBuffer->isCpuBuffer());
328 this->addingWork(gpu);
Chris Daltonc95dd322020-05-27 09:20:25 -0600329 this->addResource(indirectBuffer->resource());
Chris Dalton03fdf6a2020-04-07 12:31:59 -0600330 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndirect(fCmdBuffer,
331 indirectBuffer->buffer(),
332 offset,
333 drawCount,
334 stride));
335}
336
337void GrVkCommandBuffer::drawIndexedIndirect(const GrVkGpu* gpu,
338 const GrVkMeshBuffer* indirectBuffer,
339 VkDeviceSize offset,
340 uint32_t drawCount,
341 uint32_t stride) {
342 SkASSERT(fIsActive);
343 SkASSERT(fActiveRenderPass);
344 SkASSERT(!indirectBuffer->isCpuBuffer());
345 this->addingWork(gpu);
Chris Daltonc95dd322020-05-27 09:20:25 -0600346 this->addResource(indirectBuffer->resource());
Chris Dalton03fdf6a2020-04-07 12:31:59 -0600347 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexedIndirect(fCmdBuffer,
348 indirectBuffer->buffer(),
349 offset,
350 drawCount,
351 stride));
352}
353
egdaniel470d77a2016-03-18 12:50:27 -0700354void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
355 uint32_t firstViewport,
356 uint32_t viewportCount,
357 const VkViewport* viewports) {
358 SkASSERT(fIsActive);
359 SkASSERT(1 == viewportCount);
John Stilesc1c3c6d2020-08-15 23:22:53 -0400360 if (0 != memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
egdaniel470d77a2016-03-18 12:50:27 -0700361 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
362 firstViewport,
363 viewportCount,
364 viewports));
365 fCachedViewport = viewports[0];
366 }
367}
368
369void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
370 uint32_t firstScissor,
371 uint32_t scissorCount,
372 const VkRect2D* scissors) {
373 SkASSERT(fIsActive);
374 SkASSERT(1 == scissorCount);
John Stilesc1c3c6d2020-08-15 23:22:53 -0400375 if (0 != memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
egdaniel470d77a2016-03-18 12:50:27 -0700376 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
377 firstScissor,
378 scissorCount,
379 scissors));
380 fCachedScissor = scissors[0];
381 }
382}
383
384void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
385 const float blendConstants[4]) {
386 SkASSERT(fIsActive);
John Stilesc1c3c6d2020-08-15 23:22:53 -0400387 if (0 != memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
egdaniel470d77a2016-03-18 12:50:27 -0700388 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
389 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
390 }
391}
egdaniel9a6cf802016-06-08 08:22:05 -0700392
Greg Danielee54f232019-04-03 14:58:40 -0400393void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
394 this->submitPipelineBarriers(gpu);
395 fHasWork = true;
396}
397
egdaniel9a6cf802016-06-08 08:22:05 -0700398///////////////////////////////////////////////////////////////////////////////
399// PrimaryCommandBuffer
400////////////////////////////////////////////////////////////////////////////////
egdaniel9cb63402016-06-23 08:37:05 -0700401GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
402 // Should have ended any render pass we're in the middle of
403 SkASSERT(!fActiveRenderPass);
404}
405
Greg Daniel315c8dc2019-11-26 15:41:27 -0500406GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
Greg Daniel0addbdf2019-11-25 15:03:58 -0500407 VkCommandPool cmdPool) {
egdaniel9a6cf802016-06-08 08:22:05 -0700408 const VkCommandBufferAllocateInfo cmdInfo = {
409 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400410 nullptr, // pNext
Greg Daniel0addbdf2019-11-25 15:03:58 -0500411 cmdPool, // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700412 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
413 1 // bufferCount
414 };
415
416 VkCommandBuffer cmdBuffer;
Greg Daniel315c8dc2019-11-26 15:41:27 -0500417 VkResult err;
418 GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
egdaniel9a6cf802016-06-08 08:22:05 -0700419 if (err) {
420 return nullptr;
421 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500422 return new GrVkPrimaryCommandBuffer(cmdBuffer);
egdaniel9a6cf802016-06-08 08:22:05 -0700423}
424
Greg Daniele643da62019-11-05 12:36:42 -0500425void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700426 SkASSERT(!fIsActive);
427 VkCommandBufferBeginInfo cmdBufferBeginInfo;
428 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
429 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
430 cmdBufferBeginInfo.pNext = nullptr;
431 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
432 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
433
Greg Daniele643da62019-11-05 12:36:42 -0500434 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
egdaniel9a6cf802016-06-08 08:22:05 -0700435 fIsActive = true;
436}
437
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500438void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700439 SkASSERT(fIsActive);
440 SkASSERT(!fActiveRenderPass);
Greg Danielee54f232019-04-03 14:58:40 -0400441
442 this->submitPipelineBarriers(gpu);
443
Greg Daniele643da62019-11-05 12:36:42 -0500444 GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
egdaniel9a6cf802016-06-08 08:22:05 -0700445 this->invalidateState();
446 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400447 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700448}
449
Greg Danielfa3adf72019-11-07 09:53:41 -0500450bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
egdaniel9cb63402016-06-23 08:37:05 -0700451 const GrVkRenderPass* renderPass,
Robert Phillips95214472017-08-08 18:00:03 -0400452 const VkClearValue clearValues[],
Greg Danielfa3adf72019-11-07 09:53:41 -0500453 GrVkRenderTarget* target,
egdaniel9cb63402016-06-23 08:37:05 -0700454 const SkIRect& bounds,
455 bool forSecondaryCB) {
egdaniel9a6cf802016-06-08 08:22:05 -0700456 SkASSERT(fIsActive);
457 SkASSERT(!fActiveRenderPass);
Greg Daniel7acddf52020-12-16 15:15:51 -0500458 SkASSERT(renderPass->isCompatible(*target, renderPass->selfDependencyFlags(),
459 renderPass->loadFromResolve()));
Greg Danielfa3adf72019-11-07 09:53:41 -0500460
Greg Daniel7acddf52020-12-16 15:15:51 -0500461 const GrVkFramebuffer* framebuffer = target->getFramebuffer(*renderPass);
Greg Danielfa3adf72019-11-07 09:53:41 -0500462 if (!framebuffer) {
463 return false;
464 }
egdaniel9cb63402016-06-23 08:37:05 -0700465
Greg Danielee54f232019-04-03 14:58:40 -0400466 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400467
egdaniel9a6cf802016-06-08 08:22:05 -0700468 VkRenderPassBeginInfo beginInfo;
egdaniel9cb63402016-06-23 08:37:05 -0700469 VkRect2D renderArea;
470 renderArea.offset = { bounds.fLeft , bounds.fTop };
471 renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };
472
473 memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
474 beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
475 beginInfo.pNext = nullptr;
476 beginInfo.renderPass = renderPass->vkRenderPass();
Greg Danielfa3adf72019-11-07 09:53:41 -0500477 beginInfo.framebuffer = framebuffer->framebuffer();
egdaniel9cb63402016-06-23 08:37:05 -0700478 beginInfo.renderArea = renderArea;
Greg Danielb68319a2018-02-23 16:08:28 -0500479 beginInfo.clearValueCount = renderPass->clearValueCount();
egdaniel9cb63402016-06-23 08:37:05 -0700480 beginInfo.pClearValues = clearValues;
481
482 VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
483 : VK_SUBPASS_CONTENTS_INLINE;
484
egdaniel9a6cf802016-06-08 08:22:05 -0700485 GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
486 fActiveRenderPass = renderPass;
487 this->addResource(renderPass);
Greg Daniel7acddf52020-12-16 15:15:51 -0500488 target->addResources(*this, *renderPass);
Greg Danielfa3adf72019-11-07 09:53:41 -0500489 return true;
egdaniel9a6cf802016-06-08 08:22:05 -0700490}
491
// Ends the active render pass and clears the active-pass pointer.
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    // Flush pending (self-dependency) barriers before closing the pass.
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
499
Greg Daniela8c32102020-12-30 15:09:32 -0500500
// Advances the active render pass to its next subpass.
// NOTE(review): the name is a typo for "nextSubpass"; renaming would require
// updating all callers, so it is left unchanged here.
void GrVkPrimaryCommandBuffer::nexSubpass(GrVkGpu* gpu, bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;
    GR_VK_CALL(gpu->vkInterface(), CmdNextSubpass(fCmdBuffer, contents));
}
508
egdaniel9a6cf802016-06-08 08:22:05 -0700509void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
Greg Daniel8daf3b72019-07-30 09:57:26 -0400510 std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500511 // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
512 // if the command pools both were created from were created with the same queue family. However,
513 // we currently always create them from the same pool.
egdaniel9a6cf802016-06-08 08:22:05 -0700514 SkASSERT(fIsActive);
Greg Daniel77b53f62016-10-18 11:48:51 -0400515 SkASSERT(!buffer->fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700516 SkASSERT(fActiveRenderPass);
517 SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));
518
Greg Danielee54f232019-04-03 14:58:40 -0400519 this->addingWork(gpu);
Greg Danielf346df32019-04-03 14:52:13 -0400520
egdaniel9a6cf802016-06-08 08:22:05 -0700521 GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
Greg Daniel8daf3b72019-07-30 09:57:26 -0400522 fSecondaryCommandBuffers.push_back(std::move(buffer));
egdaniel066df7c2016-06-08 14:02:27 -0700523 // When executing a secondary command buffer all state (besides render pass state) becomes
524 // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
525 this->invalidateState();
egdaniel9a6cf802016-06-08 08:22:05 -0700526}
527
Greg Daniele1185582019-12-04 11:29:44 -0500528static bool submit_to_queue(GrVkGpu* gpu,
Greg Daniel48661b82018-01-22 16:11:35 -0500529 VkQueue queue,
530 VkFence fence,
531 uint32_t waitCount,
532 const VkSemaphore* waitSemaphores,
533 const VkPipelineStageFlags* waitStages,
534 uint32_t commandBufferCount,
535 const VkCommandBuffer* commandBuffers,
536 uint32_t signalCount,
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400537 const VkSemaphore* signalSemaphores,
538 GrProtected protectedContext) {
539 VkProtectedSubmitInfo protectedSubmitInfo;
540 if (protectedContext == GrProtected::kYes) {
541 memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
542 protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
543 protectedSubmitInfo.pNext = nullptr;
544 protectedSubmitInfo.protectedSubmit = VK_TRUE;
545 }
546
Greg Daniel48661b82018-01-22 16:11:35 -0500547 VkSubmitInfo submitInfo;
548 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
549 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Emircan Uysaler23ca4e72019-06-24 10:53:09 -0400550 submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
Greg Daniel48661b82018-01-22 16:11:35 -0500551 submitInfo.waitSemaphoreCount = waitCount;
552 submitInfo.pWaitSemaphores = waitSemaphores;
553 submitInfo.pWaitDstStageMask = waitStages;
554 submitInfo.commandBufferCount = commandBufferCount;
555 submitInfo.pCommandBuffers = commandBuffers;
556 submitInfo.signalSemaphoreCount = signalCount;
557 submitInfo.pSignalSemaphores = signalSemaphores;
Greg Daniele1185582019-12-04 11:29:44 -0500558 VkResult result;
559 GR_VK_CALL_RESULT(gpu, result, QueueSubmit(queue, 1, &submitInfo, fence));
560 return result == VK_SUCCESS;
Greg Daniel48661b82018-01-22 16:11:35 -0500561}
562
Greg Daniele1185582019-12-04 11:29:44 -0500563bool GrVkPrimaryCommandBuffer::submitToQueue(
Greg Daniele643da62019-11-05 12:36:42 -0500564 GrVkGpu* gpu,
Greg Daniel6be35232017-03-01 17:01:09 -0500565 VkQueue queue,
Greg Daniel48661b82018-01-22 16:11:35 -0500566 SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
567 SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
egdaniel9a6cf802016-06-08 08:22:05 -0700568 SkASSERT(!fIsActive);
569
570 VkResult err;
jvanverth7ec92412016-07-06 09:24:57 -0700571 if (VK_NULL_HANDLE == fSubmitFence) {
572 VkFenceCreateInfo fenceInfo;
573 memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
574 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
Greg Daniele1185582019-12-04 11:29:44 -0500575 GR_VK_CALL_RESULT(gpu, err, CreateFence(gpu->device(), &fenceInfo, nullptr,
576 &fSubmitFence));
577 if (err) {
578 fSubmitFence = VK_NULL_HANDLE;
579 return false;
580 }
jvanverth7ec92412016-07-06 09:24:57 -0700581 } else {
Greg Daniele1185582019-12-04 11:29:44 -0500582 // This cannot return DEVICE_LOST so we assert we succeeded.
583 GR_VK_CALL_RESULT(gpu, err, ResetFences(gpu->device(), 1, &fSubmitFence));
584 SkASSERT(err == VK_SUCCESS);
jvanverth7ec92412016-07-06 09:24:57 -0700585 }
egdaniel9a6cf802016-06-08 08:22:05 -0700586
Greg Daniela5cb7812017-06-16 09:45:32 -0400587 int signalCount = signalSemaphores.count();
Greg Daniel6be35232017-03-01 17:01:09 -0500588 int waitCount = waitSemaphores.count();
Greg Daniel6be35232017-03-01 17:01:09 -0500589
Greg Daniele1185582019-12-04 11:29:44 -0500590 bool submitted = false;
591
Greg Daniel48661b82018-01-22 16:11:35 -0500592 if (0 == signalCount && 0 == waitCount) {
593 // This command buffer has no dependent semaphores so we can simply just submit it to the
594 // queue with no worries.
Greg Daniele1185582019-12-04 11:29:44 -0500595 submitted = submit_to_queue(
596 gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
597 gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
Greg Daniel48661b82018-01-22 16:11:35 -0500598 } else {
Greg Daniel48661b82018-01-22 16:11:35 -0500599 SkTArray<VkSemaphore> vkSignalSems(signalCount);
600 for (int i = 0; i < signalCount; ++i) {
601 if (signalSemaphores[i]->shouldSignal()) {
602 this->addResource(signalSemaphores[i]);
603 vkSignalSems.push_back(signalSemaphores[i]->semaphore());
604 }
605 }
606
607 SkTArray<VkSemaphore> vkWaitSems(waitCount);
608 SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
609 for (int i = 0; i < waitCount; ++i) {
610 if (waitSemaphores[i]->shouldWait()) {
611 this->addResource(waitSemaphores[i]);
612 vkWaitSems.push_back(waitSemaphores[i]->semaphore());
613 vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
614 }
615 }
Greg Daniele1185582019-12-04 11:29:44 -0500616 submitted = submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(),
617 vkWaitSems.begin(), vkWaitStages.begin(), 1, &fCmdBuffer,
618 vkSignalSems.count(), vkSignalSems.begin(),
619 gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
620 if (submitted) {
621 for (int i = 0; i < signalCount; ++i) {
622 signalSemaphores[i]->markAsSignaled();
623 }
624 for (int i = 0; i < waitCount; ++i) {
625 waitSemaphores[i]->markAsWaited();
626 }
Greg Daniel48661b82018-01-22 16:11:35 -0500627 }
Greg Daniel48661b82018-01-22 16:11:35 -0500628 }
egdaniel9a6cf802016-06-08 08:22:05 -0700629
Greg Daniele1185582019-12-04 11:29:44 -0500630 if (!submitted) {
631 // Destroy the fence or else we will try to wait forever for it to finish.
egdaniel9a6cf802016-06-08 08:22:05 -0700632 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
633 fSubmitFence = VK_NULL_HANDLE;
Greg Daniele1185582019-12-04 11:29:44 -0500634 return false;
egdaniel9a6cf802016-06-08 08:22:05 -0700635 }
Greg Daniele1185582019-12-04 11:29:44 -0500636 return true;
egdaniel9a6cf802016-06-08 08:22:05 -0700637}
638
// Blocks the CPU until the fence associated with this command buffer's
// submission signals. May only be called after the buffer has been submitted
// (i.e. fSubmitFence has been created).
void GrVkPrimaryCommandBuffer::forceSync(GrVkGpu* gpu) {
    SkASSERT(fSubmitFence != VK_NULL_HANDLE);
    GR_VK_CALL_ERRCHECK(gpu, WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
}
643
644bool GrVkPrimaryCommandBuffer::finished(GrVkGpu* gpu) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500645 SkASSERT(!fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700646 if (VK_NULL_HANDLE == fSubmitFence) {
647 return true;
648 }
649
Greg Daniele1185582019-12-04 11:29:44 -0500650 VkResult err;
651 GR_VK_CALL_RESULT_NOCHECK(gpu, err, GetFenceStatus(gpu->device(), fSubmitFence));
egdaniel9a6cf802016-06-08 08:22:05 -0700652 switch (err) {
653 case VK_SUCCESS:
Greg Daniele1185582019-12-04 11:29:44 -0500654 case VK_ERROR_DEVICE_LOST:
egdaniel9a6cf802016-06-08 08:22:05 -0700655 return true;
656
657 case VK_NOT_READY:
658 return false;
659
660 default:
661 SkDebugf("Error getting fence status: %d\n", err);
Greg Daniele1185582019-12-04 11:29:44 -0500662 SK_ABORT("Got an invalid fence status");
663 return false;
egdaniel9a6cf802016-06-08 08:22:05 -0700664 }
egdaniel9a6cf802016-06-08 08:22:05 -0700665}
666
Greg Daniela3aa75a2019-04-12 14:24:55 -0400667void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
668 fFinishedProcs.push_back(std::move(finishedProc));
669}
670
Jim Van Verth5082df12020-03-11 16:14:51 -0400671void GrVkPrimaryCommandBuffer::onReleaseResources() {
jvanverth7ec92412016-07-06 09:24:57 -0700672 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Jim Van Verth5082df12020-03-11 16:14:51 -0400673 fSecondaryCommandBuffers[i]->releaseResources();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500674 }
Greg Danielfe159622020-04-10 17:43:51 +0000675 this->callFinishedProcs();
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500676}
677
Greg Daniel0addbdf2019-11-25 15:03:58 -0500678void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500679 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500680 fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
jvanverth7ec92412016-07-06 09:24:57 -0700681 }
682 fSecondaryCommandBuffers.reset();
683}
684
// Records a vkCmdCopyImage transferring copyRegions from srcImage (in
// srcLayout) to dstImage (in dstLayout). Both images are added as tracked
// resources so they stay alive until the GPU finishes. Must be recorded
// outside of a render pass.
void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}
705
// Records a vkCmdBlitImage (scaled/filtered copy) from srcImage to dstImage.
// The GrManagedResources are passed separately from the raw VkImage handles so
// the caller controls which objects are ref'd; both are retained until the GPU
// finishes. Must be recorded outside of a render pass.
void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrManagedResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrManagedResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}
730
Greg Daniel6ecc9112017-06-16 16:17:03 +0000731void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
732 const GrVkImage& srcImage,
733 const GrVkImage& dstImage,
734 uint32_t blitRegionCount,
735 const VkImageBlit* blitRegions,
736 VkFilter filter) {
737 this->blitImage(gpu,
738 srcImage.resource(),
739 srcImage.image(),
740 srcImage.currentLayout(),
741 dstImage.resource(),
742 dstImage.image(),
743 dstImage.currentLayout(),
744 blitRegionCount,
745 blitRegions,
746 filter);
747}
748
749
// Records a vkCmdCopyImageToBuffer (GPU readback into a transfer buffer).
// The image and buffer are retained until the GPU finishes. Must be recorded
// outside of a render pass.
void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 GrVkTransferBuffer* dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        dstBuffer->buffer(),
                                                        copyRegionCount,
                                                        copyRegions));
}
768
// Records a vkCmdCopyBufferToImage (upload from a transfer buffer into an
// image in dstLayout). Both objects are retained until the GPU finishes.
// Must be recorded outside of a render pass.
void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 GrVkTransferBuffer* srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcBuffer->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer->buffer(),
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
}
787
Greg Daniel6888c0d2017-08-25 11:55:50 -0400788
// Records a vkCmdCopyBuffer between two GrVkBuffers. In debug builds, checks
// that every region is non-empty and lies fully within both buffers. Both
// buffers are retained until the GPU finishes. Must be recorded outside of a
// render pass.
void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          GrVkBuffer* srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    // Validate each copy region against the source and destination extents.
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    this->addResource(srcBuffer->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcBuffer->buffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
}
815
// Records an inline buffer update via vkCmdUpdateBuffer. The asserts mirror
// Vulkan's requirements for this command: offset and size must be 4-byte
// aligned and the size at most 65536 bytes. Must be recorded outside of a
// render pass.
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));   // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));    // four byte aligned
    this->addingWork(gpu);
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}
835
// Records a vkCmdClearColorImage filling the given subresource ranges of
// image with a uniform color, using the image's current layout. The image is
// retained until the GPU finishes. Must be recorded outside of a render pass.
void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}
852
// Records a vkCmdClearDepthStencilImage on the given subresource ranges,
// using the image's current layout. ('color' actually carries the depth and
// stencil clear values.) The image is retained until the GPU finishes.
// Must be recorded outside of a render pass.
void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}
869
// Records a vkCmdResolveImage (multisample resolve) from srcImage into
// dstImage, using each image's current layout. Both images are retained until
// the GPU finishes. Must be recorded outside of a render pass.
void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}
890
// Frees Vulkan objects owned by this command buffer once the GPU is done with
// it. Destroys the submit fence if one was created. All secondary command
// buffers must already have been recycled back to the pool by this point.
void GrVkPrimaryCommandBuffer::onFreeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    SkASSERT(!fSecondaryCommandBuffers.count());
}
899
///////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
///////////////////////////////////////////////////////////////////////////////
903
Greg Daniel315c8dc2019-11-26 15:41:27 -0500904GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500905 GrVkCommandPool* cmdPool) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500906 SkASSERT(cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700907 const VkCommandBufferAllocateInfo cmdInfo = {
908 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400909 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500910 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700911 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
912 1 // bufferCount
913 };
914
915 VkCommandBuffer cmdBuffer;
Greg Daniel315c8dc2019-11-26 15:41:27 -0500916 VkResult err;
917 GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
egdaniel9a6cf802016-06-08 08:22:05 -0700918 if (err) {
919 return nullptr;
920 }
Greg Daniel0addbdf2019-11-25 15:03:58 -0500921 return new GrVkSecondaryCommandBuffer(cmdBuffer, false);
egdaniel9a6cf802016-06-08 08:22:05 -0700922}
923
Greg Daniel070cbaf2019-01-03 17:35:54 -0500924GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500925 return new GrVkSecondaryCommandBuffer(cmdBuffer, true);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500926}
egdaniel9a6cf802016-06-08 08:22:05 -0700927
Greg Daniele643da62019-11-05 12:36:42 -0500928void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
jvanverth7ec92412016-07-06 09:24:57 -0700929 const GrVkRenderPass* compatibleRenderPass) {
egdaniel9a6cf802016-06-08 08:22:05 -0700930 SkASSERT(!fIsActive);
jvanverth7ec92412016-07-06 09:24:57 -0700931 SkASSERT(compatibleRenderPass);
932 fActiveRenderPass = compatibleRenderPass;
egdaniel9a6cf802016-06-08 08:22:05 -0700933
Greg Daniel070cbaf2019-01-03 17:35:54 -0500934 if (!this->isWrapped()) {
935 VkCommandBufferInheritanceInfo inheritanceInfo;
936 memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
937 inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
938 inheritanceInfo.pNext = nullptr;
939 inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
940 inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
941 inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
942 inheritanceInfo.occlusionQueryEnable = false;
943 inheritanceInfo.queryFlags = 0;
944 inheritanceInfo.pipelineStatistics = 0;
egdaniel9a6cf802016-06-08 08:22:05 -0700945
Greg Daniel070cbaf2019-01-03 17:35:54 -0500946 VkCommandBufferBeginInfo cmdBufferBeginInfo;
947 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
948 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
949 cmdBufferBeginInfo.pNext = nullptr;
950 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
951 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
952 cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;
egdaniel9a6cf802016-06-08 08:22:05 -0700953
Greg Daniele643da62019-11-05 12:36:42 -0500954 GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500955 }
egdaniel9a6cf802016-06-08 08:22:05 -0700956 fIsActive = true;
957}
958
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500959void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700960 SkASSERT(fIsActive);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500961 if (!this->isWrapped()) {
Greg Daniele643da62019-11-05 12:36:42 -0500962 GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500963 }
egdaniel9a6cf802016-06-08 08:22:05 -0700964 this->invalidateState();
965 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400966 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700967}
Greg Daniel8daf3b72019-07-30 09:57:26 -0400968
Greg Daniel0addbdf2019-11-25 15:03:58 -0500969void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400970 if (this->isWrapped()) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400971 delete this;
972 } else {
Greg Daniel0addbdf2019-11-25 15:03:58 -0500973 cmdPool->recycleSecondaryCommandBuffer(this);
Greg Daniel8daf3b72019-07-30 09:57:26 -0400974 }
975}
976