/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkCommandBuffer.h"

#include "include/core/SkRect.h"
#include "src/core/SkTraceEvent.h"
#include "src/gpu/vk/GrVkBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkFramebuffer.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkImage.h"
#include "src/gpu/vk/GrVkImageView.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkPipelineState.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkUtil.h"

void GrVkCommandBuffer::invalidateState() {
    for (auto& boundInputBuffer : fBoundInputBuffers) {
        boundInputBuffer = VK_NULL_HANDLE;
    }
    fBoundIndexBuffer = VK_NULL_HANDLE;

    memset(&fCachedViewport, 0, sizeof(VkViewport));
    fCachedViewport.width = -1.0f; // Viewport must have a width greater than 0

    memset(&fCachedScissor, 0, sizeof(VkRect2D));
    fCachedScissor.offset.x = -1; // Scissor offset must be non-negative to be valid

    for (int i = 0; i < 4; ++i) {
        fCachedBlendConstant[i] = -1.0f;
    }
}
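
// Editorial note (not from the original source): the sentinels above are values that valid
// state can never take -- a viewport width must be positive, a scissor offset non-negative,
// and blend constants, as Skia uses them, are normalized color channels that are never
// negative -- so the first setViewport()/setScissor()/setBlendConstants() call after
// invalidation can never match the cached value and always re-records its Vulkan command.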

void GrVkCommandBuffer::freeGPUData(const GrGpu* gpu, VkCommandPool cmdPool) const {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive);
    SkASSERT(!fTrackedResources.count());
    SkASSERT(!fTrackedRecycledResources.count());
    SkASSERT(!fTrackedGpuBuffers.count());
    SkASSERT(!fTrackedGpuSurfaces.count());
    SkASSERT(cmdPool != VK_NULL_HANDLE);
    SkASSERT(!this->isWrapped());

    GrVkGpu* vkGpu = (GrVkGpu*)gpu;
    GR_VK_CALL(vkGpu->vkInterface(), FreeCommandBuffers(vkGpu->device(), cmdPool, 1, &fCmdBuffer));

    this->onFreeGPUData(vkGpu);
}

void GrVkCommandBuffer::releaseResources() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    SkASSERT(!fIsActive || this->isWrapped());
    fTrackedResources.reset();
    fTrackedRecycledResources.reset();

    fTrackedGpuBuffers.reset();
    fTrackedGpuSurfaces.reset();

    this->invalidateState();

    this->onReleaseResources();
}

////////////////////////////////////////////////////////////////////////////////
// CommandBuffer commands
////////////////////////////////////////////////////////////////////////////////

void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
                                        const GrManagedResource* resource,
                                        VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask,
                                        bool byRegion,
                                        BarrierType barrierType,
                                        void* barrier) {
    SkASSERT(!this->isWrapped());
    SkASSERT(fIsActive);
#ifdef SK_DEBUG
    // Image barriers can appear inside a render pass, but only as subpass self-dependencies,
    // which would require additional subpass support. Buffer barriers can never appear inside
    // a render pass. For now, assert that any barrier recorded inside a render pass is a
    // valid self-dependency image barrier.
    bool isValidSubpassBarrier = false;
    if (barrierType == kImageMemory_BarrierType) {
        VkImageMemoryBarrier* imgBarrier = static_cast<VkImageMemoryBarrier*>(barrier);
        isValidSubpassBarrier = (imgBarrier->newLayout == imgBarrier->oldLayout) &&
                                (imgBarrier->srcQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                (imgBarrier->dstQueueFamilyIndex == VK_QUEUE_FAMILY_IGNORED) &&
                                byRegion;
    }
    SkASSERT(!fActiveRenderPass || isValidSubpassBarrier);
#endif

    if (barrierType == kBufferMemory_BarrierType) {
        const VkBufferMemoryBarrier* barrierPtr = static_cast<VkBufferMemoryBarrier*>(barrier);
        fBufferBarriers.push_back(*barrierPtr);
    } else {
        SkASSERT(barrierType == kImageMemory_BarrierType);
        const VkImageMemoryBarrier* barrierPtr = static_cast<VkImageMemoryBarrier*>(barrier);
        // Check whether the new barrier covers part of the same subresource range as a barrier
        // already in the current batch. If it does, the current batch must be submitted first,
        // because the Vulkan spec does not define an ordering for barriers submitted in the
        // same batch.
        // TODO: Look at whether we can gain anything by merging barriers together instead of
        // submitting the old ones.
        for (int i = 0; i < fImageBarriers.count(); ++i) {
            VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
            if (barrierPtr->image == currentBarrier.image) {
                const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
                const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
                SkASSERT(newRange.aspectMask == oldRange.aspectMask);
                SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
                SkASSERT(newRange.layerCount == oldRange.layerCount);
                uint32_t newStart = newRange.baseMipLevel;
                uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
                uint32_t oldStart = oldRange.baseMipLevel;
                uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
                if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
                    this->submitPipelineBarriers(gpu);
                    break;
                }
            }
        }
        fImageBarriers.push_back(*barrierPtr);
    }
    fBarriersByRegion |= byRegion;
    fSrcStageMask = fSrcStageMask | srcStageMask;
    fDstStageMask = fDstStageMask | dstStageMask;

    fHasWork = true;
    if (resource) {
        this->addResource(resource);
    }
    if (fActiveRenderPass) {
        this->submitPipelineBarriers(gpu, true);
    }
}
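
// Worked example (editorial, not from the original source): the closed-interval test above
// flushes the batch whenever two barriers touch the same mip level of the same image. Say a
// batched barrier covers mips [2, 4] and a new one covers mips [4, 6]:
//
//     std::max(newStart, oldStart) = std::max(4, 2) = 4
//     std::min(newEnd,   oldEnd)   = std::min(6, 4) = 4   =>  4 <= 4, so they overlap
//
// and the pending barriers are submitted before the new one is batched. Ranges [2, 3] and
// [4, 6] would instead give 4 <= 3, i.e. no overlap, and the two barriers batch together.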

void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu, bool forSelfDependency) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // Image barriers can appear inside a render pass only as subpass self-dependencies, and
        // buffer barriers can never appear inside a render pass at all. So unless this submit
        // is for a self-dependency, assert that we are not in a render pass.
        SkASSERT(!fActiveRenderPass || forSelfDependency);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}

void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
                                        sk_sp<const GrBuffer> buffer) {
    VkBuffer vkBuffer = static_cast<const GrVkBuffer*>(buffer.get())->vkBuffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    SkASSERT(binding < kMaxInputBuffers);
    // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundInputBuffers[binding]) {
        VkDeviceSize offset = 0;
        GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
                                                            binding,
                                                            1,
                                                            &vkBuffer,
                                                            &offset));
        fBoundInputBuffers[binding] = vkBuffer;
        this->addGrBuffer(std::move(buffer));
    }
}

void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, sk_sp<const GrBuffer> buffer) {
    VkBuffer vkBuffer = static_cast<const GrVkBuffer*>(buffer.get())->vkBuffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundIndexBuffer) {
        GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
                                                          vkBuffer, /*offset=*/0,
                                                          VK_INDEX_TYPE_UINT16));
        fBoundIndexBuffer = vkBuffer;
        this->addGrBuffer(std::move(buffer));
    }
}
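
// Editorial note (not from the original source): both bind paths above cache the last bound
// VkBuffer handle so that back-to-back draws reusing a buffer skip the redundant vkCmdBind*
// call. The cache is only sound because the bind offset is currently always 0 (see the TODOs
// above), and invalidateState() clears it whenever previously recorded state can no longer
// be trusted, e.g. after a secondary command buffer is executed.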

void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
    if (gpu->vkCaps().mustInvalidatePrimaryCmdBufferStateAfterClearAttachments()) {
        this->invalidateState();
    }
}

void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           VkPipelineLayout layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout,
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
}

void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, sk_sp<const GrVkPipeline> pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(std::move(pipeline));
}

void GrVkCommandBuffer::pushConstants(const GrVkGpu* gpu, VkPipelineLayout layout,
                                      VkShaderStageFlags stageFlags, uint32_t offset,
                                      uint32_t size, const void* values) {
    SkASSERT(fIsActive);
    // offset and size must be a multiple of 4
    SkASSERT(!SkToBool(offset & 0x3));
    SkASSERT(!SkToBool(size & 0x3));
    GR_VK_CALL(gpu->vkInterface(), CmdPushConstants(fCmdBuffer,
                                                    layout,
                                                    stageFlags,
                                                    offset,
                                                    size,
                                                    values));
}

void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}

void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}

void GrVkCommandBuffer::drawIndirect(const GrVkGpu* gpu,
                                     sk_sp<const GrBuffer> indirectBuffer,
                                     VkDeviceSize offset,
                                     uint32_t drawCount,
                                     uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    VkBuffer vkBuffer = static_cast<const GrVkBuffer*>(indirectBuffer.get())->vkBuffer();
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndirect(fCmdBuffer,
                                                   vkBuffer,
                                                   offset,
                                                   drawCount,
                                                   stride));
    this->addGrBuffer(std::move(indirectBuffer));
}

void GrVkCommandBuffer::drawIndexedIndirect(const GrVkGpu* gpu,
                                            sk_sp<const GrBuffer> indirectBuffer,
                                            VkDeviceSize offset,
                                            uint32_t drawCount,
                                            uint32_t stride) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(!indirectBuffer->isCpuBuffer());
    this->addingWork(gpu);
    VkBuffer vkBuffer = static_cast<const GrVkBuffer*>(indirectBuffer.get())->vkBuffer();
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexedIndirect(fCmdBuffer,
                                                          vkBuffer,
                                                          offset,
                                                          drawCount,
                                                          stride));
    this->addGrBuffer(std::move(indirectBuffer));
}

void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
                                    uint32_t firstViewport,
                                    uint32_t viewportCount,
                                    const VkViewport* viewports) {
    SkASSERT(fIsActive);
    SkASSERT(1 == viewportCount);
    if (0 != memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
                                                      firstViewport,
                                                      viewportCount,
                                                      viewports));
        fCachedViewport = viewports[0];
    }
}

void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
                                   uint32_t firstScissor,
                                   uint32_t scissorCount,
                                   const VkRect2D* scissors) {
    SkASSERT(fIsActive);
    SkASSERT(1 == scissorCount);
    if (0 != memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
                                                     firstScissor,
                                                     scissorCount,
                                                     scissors));
        fCachedScissor = scissors[0];
    }
}

void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
                                          const float blendConstants[4]) {
    SkASSERT(fIsActive);
    if (0 != memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
        memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
    }
}

void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}

////////////////////////////////////////////////////////////////////////////////
// PrimaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////

GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}

GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(GrVkGpu* gpu,
                                                           VkCommandPool cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool,                                          // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer);
}

void GrVkPrimaryCommandBuffer::begin(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));
    fIsActive = true;
}

void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu, bool abandoningBuffer) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    // If we are in the process of abandoning the context, then the GrResourceCache will have
    // freed all resources before destroying the GrVkGpu. When we destroy the GrVkGpu we still
    // call end on the command buffer to keep all our state tracking consistent. However, the
    // Vulkan validation layers complain about calling end on a command buffer that contains
    // resources that have already been deleted. Since the Vulkan API does not require a
    // command buffer to be ended before it is deleted, we simply skip the Vulkan API calls
    // and update our own state tracking.
    if (!abandoningBuffer) {
        this->submitPipelineBarriers(gpu);

        GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

bool GrVkPrimaryCommandBuffer::beginRenderPass(GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               sk_sp<const GrVkFramebuffer> framebuffer,
                                               const VkClearValue clearValues[],
                                               const GrSurface* target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    SkASSERT(framebuffer);

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft, bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = framebuffer->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    this->addResource(std::move(framebuffer));
    this->addGrSurface(sk_ref_sp(target));
    return true;
}

void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}

void GrVkPrimaryCommandBuffer::nextSubpass(GrVkGpu* gpu, bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;
    GR_VK_CALL(gpu->vkInterface(), CmdNextSubpass(fCmdBuffer, contents));
}

void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               std::unique_ptr<GrVkSecondaryCommandBuffer> buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command
    // buffer if the command pools they were each allocated from were created for the same
    // queue family. However, we currently always create them from the same pool.
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    fSecondaryCommandBuffers.push_back(std::move(buffer));
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}

static bool submit_to_queue(GrVkGpu* gpu,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores,
                            GrProtected protectedContext) {
    VkProtectedSubmitInfo protectedSubmitInfo;
    if (protectedContext == GrProtected::kYes) {
        memset(&protectedSubmitInfo, 0, sizeof(VkProtectedSubmitInfo));
        protectedSubmitInfo.sType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
        protectedSubmitInfo.pNext = nullptr;
        protectedSubmitInfo.protectedSubmit = VK_TRUE;
    }

    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = protectedContext == GrProtected::kYes ? &protectedSubmitInfo : nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, QueueSubmit(queue, 1, &submitInfo, fence));
    return result == VK_SUCCESS;
}
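
// Editorial note (not from the original source): for a protected submit,
// VkProtectedSubmitInfo is chained into VkSubmitInfo::pNext with protectedSubmit = VK_TRUE,
// which requires every command buffer in the submission to have been allocated from a
// protected command pool. An unprotected submit simply leaves pNext null, as above.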

bool GrVkPrimaryCommandBuffer::submitToQueue(
        GrVkGpu* gpu,
        VkQueue queue,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        GR_VK_CALL_RESULT(gpu, err, CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                &fSubmitFence));
        if (err) {
            fSubmitFence = VK_NULL_HANDLE;
            return false;
        }
    } else {
        // This cannot return DEVICE_LOST, so we assert that it succeeded.
        GR_VK_CALL_RESULT(gpu, err, ResetFences(gpu->device(), 1, &fSubmitFence));
        SkASSERT(err == VK_SUCCESS);
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    bool submitted = false;

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores, so we can submit it to the queue
        // directly.
        submitted = submit_to_queue(
                gpu, queue, fSubmitFence, 0, nullptr, nullptr, 1, &fCmdBuffer, 0, nullptr,
                gpu->protectedContext() ? GrProtected::kYes : GrProtected::kNo);
    } else {
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submitted = submit_to_queue(gpu, queue, fSubmitFence, vkWaitSems.count(),
                                    vkWaitSems.begin(), vkWaitStages.begin(), 1, &fCmdBuffer,
                                    vkSignalSems.count(), vkSignalSems.begin(),
                                    gpu->protectedContext() ? GrProtected::kYes
                                                            : GrProtected::kNo);
        if (submitted) {
            for (int i = 0; i < signalCount; ++i) {
                signalSemaphores[i]->markAsSignaled();
            }
            for (int i = 0; i < waitCount; ++i) {
                waitSemaphores[i]->markAsWaited();
            }
        }
    }

    if (!submitted) {
        // Destroy the fence, or else we will try to wait forever for it to signal.
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
        return false;
    }
    return true;
}

void GrVkPrimaryCommandBuffer::forceSync(GrVkGpu* gpu) {
    if (fSubmitFence == VK_NULL_HANDLE) {
        return;
    }
    GR_VK_CALL_ERRCHECK(gpu, WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
}

bool GrVkPrimaryCommandBuffer::finished(GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err;
    GR_VK_CALL_RESULT_NOCHECK(gpu, err, GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
        case VK_ERROR_DEVICE_LOST:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("Got an invalid fence status");
            return false;
    }
}

void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}

void GrVkPrimaryCommandBuffer::onReleaseResources() {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources();
    }
    this->callFinishedProcs();
}

void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i].release()->recycle(cmdPool);
    }
    fSecondaryCommandBuffers.reset();
}

void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrManagedResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrManagedResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkImage& srcImage,
                                         const GrVkImage& dstImage,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    this->blitImage(gpu,
                    srcImage.resource(),
                    srcImage.image(),
                    srcImage.currentLayout(),
                    dstImage.resource(),
                    dstImage.image(),
                    dstImage.currentLayout(),
                    blitRegionCount,
                    blitRegions,
                    filter);
}

void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 sk_sp<GrGpuBuffer> dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    GrVkBuffer* vkBuffer = static_cast<GrVkBuffer*>(dstBuffer.get());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        vkBuffer->vkBuffer(),
                                                        copyRegionCount,
                                                        copyRegions));
    this->addResource(srcImage->resource());
    this->addGrBuffer(std::move(dstBuffer));
}

void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 VkBuffer srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer,
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
    this->addResource(dstImage->resource());
}

void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          sk_sp<GrGpuBuffer> srcBuffer,
                                          sk_sp<GrGpuBuffer> dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif

    const GrVkBuffer* srcVk = static_cast<GrVkBuffer*>(srcBuffer.get());
    const GrVkBuffer* dstVk = static_cast<GrVkBuffer*>(dstBuffer.get());

    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcVk->vkBuffer(),
                                                 dstVk->vkBuffer(),
                                                 regionCount,
                                                 regions));
    this->addGrBuffer(std::move(srcBuffer));
    this->addGrBuffer(std::move(dstBuffer));
}

void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            sk_sp<GrVkBuffer> dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));  // four byte aligned
    this->addingWork(gpu);
    GR_VK_CALL(
            gpu->vkInterface(),
            CmdUpdateBuffer(
                    fCmdBuffer, dstBuffer->vkBuffer(), dstOffset, dataSize, (const uint32_t*)data));
    this->addGrBuffer(std::move(dstBuffer));
}
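
// Editorial sketch (not from the original source): the asserts above mirror the core
// vkCmdUpdateBuffer limits -- dstOffset and dataSize must be multiples of 4 and dataSize may
// not exceed 65536 bytes. A caller with a larger, still 4-byte-aligned payload would have to
// chunk it, e.g.:
//
//     const char* bytes = static_cast<const char*>(data);
//     for (VkDeviceSize off = 0; off < totalSize; off += 65536) {
//         VkDeviceSize chunk = std::min<VkDeviceSize>(65536, totalSize - off);
//         cmdBuffer->updateBuffer(gpu, buffer, dstOffset + off, chunk, bytes + off);
//     }
//
// In practice large uploads go through a staging buffer plus copyBuffer() instead, since
// vkCmdUpdateBuffer is only intended for small inline updates.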

void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}

void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}

void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}

void GrVkPrimaryCommandBuffer::onFreeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    SkASSERT(!fSecondaryCommandBuffers.count());
}

////////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err;
    GR_VK_CALL_RESULT(gpu, err, AllocateCommandBuffers(gpu->device(), &cmdInfo, &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkSecondaryCommandBuffer(cmdBuffer, /*externalRenderPass=*/nullptr);
}

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(
        VkCommandBuffer cmdBuffer, const GrVkRenderPass* externalRenderPass) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, externalRenderPass);
}

void GrVkSecondaryCommandBuffer::begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(!this->isWrapped());
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    VkCommandBufferInheritanceInfo inheritanceInfo;
    memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
    inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
    inheritanceInfo.pNext = nullptr;
    inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
    inheritanceInfo.subpass = 0;  // Currently only using 1 subpass for each render pass
    inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
    inheritanceInfo.occlusionQueryEnable = false;
    inheritanceInfo.queryFlags = 0;
    inheritanceInfo.pipelineStatistics = 0;

    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                               VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

    GR_VK_CALL_ERRCHECK(gpu, BeginCommandBuffer(fCmdBuffer, &cmdBufferBeginInfo));

    fIsActive = true;
}
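
// Editorial note (not from the original source): RENDER_PASS_CONTINUE_BIT declares that this
// secondary buffer will execute entirely inside a render pass, so the inheritance info must
// name a compatible VkRenderPass. The framebuffer field is optional (VK_NULL_HANDLE is
// legal), but the spec notes that providing it can let the implementation generate
// better-performing code at vkCmdExecuteCommands time.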

void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!this->isWrapped());
    GR_VK_CALL_ERRCHECK(gpu, EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fHasWork = false;
    fIsActive = false;
}

void GrVkSecondaryCommandBuffer::recycle(GrVkCommandPool* cmdPool) {
    if (this->isWrapped()) {
        delete this;
    } else {
        cmdPool->recycleSecondaryCommandBuffer(this);
    }
}