/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/vk/GrVkCommandBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -05009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkRect.h"
11#include "src/gpu/vk/GrVkCommandPool.h"
12#include "src/gpu/vk/GrVkFramebuffer.h"
13#include "src/gpu/vk/GrVkGpu.h"
14#include "src/gpu/vk/GrVkImage.h"
15#include "src/gpu/vk/GrVkImageView.h"
16#include "src/gpu/vk/GrVkIndexBuffer.h"
17#include "src/gpu/vk/GrVkPipeline.h"
18#include "src/gpu/vk/GrVkPipelineLayout.h"
19#include "src/gpu/vk/GrVkPipelineState.h"
20#include "src/gpu/vk/GrVkPipelineState.h"
21#include "src/gpu/vk/GrVkRenderPass.h"
22#include "src/gpu/vk/GrVkRenderTarget.h"
23#include "src/gpu/vk/GrVkTransferBuffer.h"
24#include "src/gpu/vk/GrVkUtil.h"
25#include "src/gpu/vk/GrVkVertexBuffer.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050026
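// Resets all cached dynamic state and bound-object handles to invalid values so
// that the next bind/set call is guaranteed to re-record the corresponding Vulkan
// command instead of being skipped by the redundancy checks below.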
void GrVkCommandBuffer::invalidateState() {
    for (auto& boundInputBuffer : fBoundInputBuffers) {
        boundInputBuffer = VK_NULL_HANDLE;
    }
    fBoundIndexBuffer = VK_NULL_HANDLE;

    memset(&fCachedViewport, 0, sizeof(VkViewport));
    fCachedViewport.width = -1.0f; // Viewport must have a width greater than 0

    memset(&fCachedScissor, 0, sizeof(VkRect2D));
    fCachedScissor.offset.x = -1; // Scissor offset must be non-negative to be valid

    for (int i = 0; i < 4; ++i) {
        fCachedBlendConstant[i] = -1.0f;
    }
}

void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu) const {
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unref(gpu);
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecycledResources[i]->recycle(gpu);
    }

    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecordingResources[i]->unref(gpu);
    }

    if (!this->isWrapped()) {
        GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), fCmdPool->vkCommandPool(),
                                                          1, &fCmdBuffer));
    }

    this->onFreeGPUData(gpu);
}

void GrVkCommandBuffer::abandonGPUData() const {
    SkDEBUGCODE(fResourcesReleased = true;)
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // We don't recycle resources when abandoning them.
        fTrackedRecycledResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecordingResources[i]->unrefAndAbandon();
    }

    this->onAbandonGPUData();
}

void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
    SkDEBUGCODE(fResourcesReleased = true;)
    SkASSERT(!fIsActive);
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unref(gpu);
    }
    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecycledResources[i]->recycle(gpu);
    }

    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecordingResources[i]->unref(gpu);
    }

    // Rewinding keeps the arrays' allocations alive for reuse. Every
    // kNumRewindResetsBeforeFullReset rewinds we do a full reset so the arrays
    // cannot stay oversized forever after one unusually large command buffer.
    if (++fNumResets > kNumRewindResetsBeforeFullReset) {
        fTrackedResources.reset();
        fTrackedRecycledResources.reset();
        fTrackedRecordingResources.reset();
        fTrackedResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecordingResources.setReserve(kInitialTrackedResourcesCount);
        fNumResets = 0;
    } else {
        fTrackedResources.rewind();
        fTrackedRecycledResources.rewind();
        fTrackedRecordingResources.rewind();
    }

    this->invalidateState();

    this->onReleaseResources(gpu);
}

////////////////////////////////////////////////////////////////////////////////
// CommandBuffer commands
////////////////////////////////////////////////////////////////////////////////

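// Pipeline barriers are not recorded immediately. They accumulate in
// fBufferBarriers/fImageBarriers and are flushed by submitPipelineBarriers(),
// which runs when real work is recorded (addingWork), when the command buffer
// ends, or when a newly added image barrier overlaps the subresource range of a
// barrier already in the batch.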
void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
                                        const GrVkResource* resource,
                                        VkPipelineStageFlags srcStageMask,
                                        VkPipelineStageFlags dstStageMask,
                                        bool byRegion,
                                        BarrierType barrierType,
                                        void* barrier) {
    SkASSERT(!this->isWrapped());
    SkASSERT(fIsActive);
    // For images we can have barriers inside of render passes, but they require us to add more
    // support for subpasses, which need self-dependencies to have barriers inside them. Also, we
    // can never have buffer barriers inside of a render pass. For now we will just assert that we
    // are not in a render pass.
    SkASSERT(!fActiveRenderPass);

    if (barrierType == kBufferMemory_BarrierType) {
        const VkBufferMemoryBarrier* barrierPtr = reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
        fBufferBarriers.push_back(*barrierPtr);
    } else {
        SkASSERT(barrierType == kImageMemory_BarrierType);
        const VkImageMemoryBarrier* barrierPtr = reinterpret_cast<VkImageMemoryBarrier*>(barrier);
        // We need to check if we are adding a pipeline barrier that covers part of the same
        // subresource range as a barrier that is already in the current batch. If it does, we must
        // submit the first batch because the Vulkan spec does not define a specific ordering for
        // barriers submitted in the same batch.
        // TODO: Look at whether we can gain anything by merging barriers together instead of
        // submitting the old ones.
        for (int i = 0; i < fImageBarriers.count(); ++i) {
            VkImageMemoryBarrier& currentBarrier = fImageBarriers[i];
            if (barrierPtr->image == currentBarrier.image) {
                const VkImageSubresourceRange newRange = barrierPtr->subresourceRange;
                const VkImageSubresourceRange oldRange = currentBarrier.subresourceRange;
                SkASSERT(newRange.aspectMask == oldRange.aspectMask);
                SkASSERT(newRange.baseArrayLayer == oldRange.baseArrayLayer);
                SkASSERT(newRange.layerCount == oldRange.layerCount);
                uint32_t newStart = newRange.baseMipLevel;
                uint32_t newEnd = newRange.baseMipLevel + newRange.levelCount - 1;
                uint32_t oldStart = oldRange.baseMipLevel;
                uint32_t oldEnd = oldRange.baseMipLevel + oldRange.levelCount - 1;
                // The mip ranges [newStart, newEnd] and [oldStart, oldEnd] intersect exactly
                // when the larger of the starts is at most the smaller of the ends.
                if (SkTMax(newStart, oldStart) <= SkTMin(newEnd, oldEnd)) {
                    this->submitPipelineBarriers(gpu);
                    break;
                }
            }
        }
        fImageBarriers.push_back(*barrierPtr);
    }
    fBarriersByRegion |= byRegion;

    fSrcStageMask = fSrcStageMask | srcStageMask;
    fDstStageMask = fDstStageMask | dstStageMask;

    fHasWork = true;
    if (resource) {
        this->addResource(resource);
    }
}

void GrVkCommandBuffer::submitPipelineBarriers(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);

    // Currently we never submit a pipeline barrier without at least one memory barrier.
    if (fBufferBarriers.count() || fImageBarriers.count()) {
        // For images we can have barriers inside of render passes, but they require us to add
        // more support for subpasses, which need self-dependencies to have barriers inside them.
        // Also, we can never have buffer barriers inside of a render pass. For now we will just
        // assert that we are not in a render pass.
        SkASSERT(!fActiveRenderPass);
        SkASSERT(!this->isWrapped());
        SkASSERT(fSrcStageMask && fDstStageMask);

        VkDependencyFlags dependencyFlags = fBarriersByRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
        GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(
                fCmdBuffer, fSrcStageMask, fDstStageMask, dependencyFlags, 0, nullptr,
                fBufferBarriers.count(), fBufferBarriers.begin(),
                fImageBarriers.count(), fImageBarriers.begin()));
        fBufferBarriers.reset();
        fImageBarriers.reset();
        fBarriersByRegion = false;
        fSrcStageMask = 0;
        fDstStageMask = 0;
    }
    SkASSERT(!fBufferBarriers.count());
    SkASSERT(!fImageBarriers.count());
    SkASSERT(!fBarriersByRegion);
    SkASSERT(!fSrcStageMask);
    SkASSERT(!fDstStageMask);
}

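// The bind methods below cache the most recently bound Vulkan handle and skip
// re-recording the bind when the same buffer is bound again, avoiding redundant
// commands in the buffer.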
void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
                                        const GrVkVertexBuffer* vbuffer) {
    VkBuffer vkBuffer = vbuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    SkASSERT(binding < kMaxInputBuffers);
    // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundInputBuffers[binding]) {
        VkDeviceSize offset = vbuffer->offset();
        GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
                                                            binding,
                                                            1,
                                                            &vkBuffer,
                                                            &offset));
        fBoundInputBuffers[binding] = vkBuffer;
        this->addResource(vbuffer->resource());
    }
}

void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
    VkBuffer vkBuffer = ibuffer->buffer();
    SkASSERT(VK_NULL_HANDLE != vkBuffer);
    // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
    // to know if we can skip binding or not.
    if (vkBuffer != fBoundIndexBuffer) {
        GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
                                                          vkBuffer,
                                                          ibuffer->offset(),
                                                          VK_INDEX_TYPE_UINT16));
        fBoundIndexBuffer = vkBuffer;
        this->addResource(ibuffer->resource());
    }
}

void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
                                         int numAttachments,
                                         const VkClearAttachment* attachments,
                                         int numRects,
                                         const VkClearRect* clearRects) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(numAttachments > 0);
    SkASSERT(numRects > 0);

    this->addingWork(gpu);

#ifdef SK_DEBUG
    for (int i = 0; i < numAttachments; ++i) {
        if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
            uint32_t testIndex;
            SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
            SkASSERT(testIndex == attachments[i].colorAttachment);
        }
    }
#endif
    GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
                                                       numAttachments,
                                                       attachments,
                                                       numRects,
                                                       clearRects));
}

void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           GrVkPipelineState* pipelineState,
                                           GrVkPipelineLayout* layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout->layout(),
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
    this->addRecordingResource(layout);
}

void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           const SkTArray<const GrVkRecycledResource*>& recycled,
                                           const SkTArray<const GrVkResource*>& resources,
                                           GrVkPipelineLayout* layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout->layout(),
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
    this->addRecordingResource(layout);
    for (int i = 0; i < recycled.count(); ++i) {
        this->addRecycledResource(recycled[i]);
    }
    for (int i = 0; i < resources.count(); ++i) {
        this->addResource(resources[i]);
    }
}

void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
                                                   VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                   pipeline->pipeline()));
    this->addResource(pipeline);
}

void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
                                    uint32_t indexCount,
                                    uint32_t instanceCount,
                                    uint32_t firstIndex,
                                    int32_t vertexOffset,
                                    uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
                                                  indexCount,
                                                  instanceCount,
                                                  firstIndex,
                                                  vertexOffset,
                                                  firstInstance));
}

void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
                             uint32_t vertexCount,
                             uint32_t instanceCount,
                             uint32_t firstVertex,
                             uint32_t firstInstance) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
                                           vertexCount,
                                           instanceCount,
                                           firstVertex,
                                           firstInstance));
}

void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
                                    uint32_t firstViewport,
                                    uint32_t viewportCount,
                                    const VkViewport* viewports) {
    SkASSERT(fIsActive);
    SkASSERT(1 == viewportCount);
    if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
                                                      firstViewport,
                                                      viewportCount,
                                                      viewports));
        fCachedViewport = viewports[0];
    }
}

void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
                                   uint32_t firstScissor,
                                   uint32_t scissorCount,
                                   const VkRect2D* scissors) {
    SkASSERT(fIsActive);
    SkASSERT(1 == scissorCount);
    if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
                                                     firstScissor,
                                                     scissorCount,
                                                     scissors));
        fCachedScissor = scissors[0];
    }
}

void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
                                          const float blendConstants[4]) {
    SkASSERT(fIsActive);
    if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
        GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
        memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
    }
}

void GrVkCommandBuffer::addingWork(const GrVkGpu* gpu) {
    this->submitPipelineBarriers(gpu);
    fHasWork = true;
}

////////////////////////////////////////////////////////////////////////////////
// PrimaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of.
    SkASSERT(!fActiveRenderPass);
}

GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
                                                           GrVkCommandPool* cmdPool) {
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
                                                                         &cmdInfo,
                                                                         &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer, cmdPool);
}

void GrVkPrimaryCommandBuffer::begin(const GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    VkCommandBufferBeginInfo cmdBufferBeginInfo;
    memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
    cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufferBeginInfo.pNext = nullptr;
    cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    cmdBufferBeginInfo.pInheritanceInfo = nullptr;

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
                                                               &cmdBufferBeginInfo));
    fIsActive = true;
}

void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->submitPipelineBarriers(gpu);

    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->unref(gpu);
    }
    fTrackedRecordingResources.rewind();
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}

void GrVkPrimaryCommandBuffer::beginRenderPass(const GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               const GrVkRenderTarget& target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(target));

    this->addingWork(gpu);

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft, bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = target.framebuffer()->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    target.addResources(*this);
}

void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork(gpu);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}

void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               GrVkSecondaryCommandBuffer* buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // if the command pools they were each created from were created with the same queue family.
    // However, we currently always create them from the same pool.
    SkASSERT(buffer->commandPool() == fCmdPool);
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork(gpu);

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    buffer->ref();
    fSecondaryCommandBuffers.push_back(buffer);
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}

static void submit_to_queue(const GrVkInterface* interface,
                            VkQueue queue,
                            VkFence fence,
                            uint32_t waitCount,
                            const VkSemaphore* waitSemaphores,
                            const VkPipelineStageFlags* waitStages,
                            uint32_t commandBufferCount,
                            const VkCommandBuffer* commandBuffers,
                            uint32_t signalCount,
                            const VkSemaphore* signalSemaphores) {
    VkSubmitInfo submitInfo;
    memset(&submitInfo, 0, sizeof(VkSubmitInfo));
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = nullptr;
    submitInfo.waitSemaphoreCount = waitCount;
    submitInfo.pWaitSemaphores = waitSemaphores;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.commandBufferCount = commandBufferCount;
    submitInfo.pCommandBuffers = commandBuffers;
    submitInfo.signalSemaphoreCount = signalCount;
    submitInfo.pSignalSemaphores = signalSemaphores;
    GR_VK_CALL_ERRCHECK(interface, QueueSubmit(queue, 1, &submitInfo, fence));
}

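// Submits this command buffer to the given queue. The submit fence is created
// lazily on first use and reset when the buffer is resubmitted. Semaphores that
// have already been signaled or waited on are filtered out, so the submit only
// references semaphores that still need to participate.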
void GrVkPrimaryCommandBuffer::submitToQueue(
        const GrVkGpu* gpu,
        VkQueue queue,
        GrVkGpu::SyncQueue sync,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                         &fSubmitFence));
        SkASSERT(!err);
    } else {
        GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores, so we can simply submit it to the
        // queue with no worries.
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence, 0, nullptr, nullptr,
                        1, &fCmdBuffer, 0, nullptr);
    } else {
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence,
                        vkWaitSems.count(), vkWaitSems.begin(), vkWaitStages.begin(),
                        1, &fCmdBuffer,
                        vkSignalSems.count(), vkSignalSems.begin());

        for (int i = 0; i < signalCount; ++i) {
            signalSemaphores[i]->markAsSignaled();
        }
        for (int i = 0; i < waitCount; ++i) {
            waitSemaphores[i]->markAsWaited();
        }
    }

    if (GrVkGpu::kForce_SyncQueue == sync) {
        err = GR_VK_CALL(gpu->vkInterface(),
                         WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
        if (VK_TIMEOUT == err) {
            SkDebugf("Fence failed to signal: %d\n", err);
            SK_ABORT("failing");
        }
        SkASSERT(!err);

        fFinishedProcs.reset();

        // Destroy the fence.
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
    }
}

bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) {
    SkASSERT(!fIsActive);
    if (VK_NULL_HANDLE == fSubmitFence) {
        return true;
    }

    VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
    switch (err) {
        case VK_SUCCESS:
            return true;

        case VK_NOT_READY:
            return false;

        default:
            SkDebugf("Error getting fence status: %d\n", err);
            SK_ABORT("failing");
            break;
    }

    return false;
}

void GrVkPrimaryCommandBuffer::addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc) {
    fFinishedProcs.push_back(std::move(finishedProc));
}

void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        fSecondaryCommandBuffers[i]->releaseResources(gpu);
    }
    fFinishedProcs.reset();
}

void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers() {
    for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
        SkASSERT(fSecondaryCommandBuffers[i]->commandPool() == fCmdPool);
        fCmdPool->recycleSecondaryCommandBuffer(fSecondaryCommandBuffers[i]);
    }
    fSecondaryCommandBuffers.reset();
}

void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
                                         GrVkImage* srcImage,
                                         VkImageLayout srcLayout,
                                         GrVkImage* dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t copyRegionCount,
                                         const VkImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
                                                srcImage->image(),
                                                srcLayout,
                                                dstImage->image(),
                                                dstLayout,
                                                copyRegionCount,
                                                copyRegions));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkResource* srcResource,
                                         VkImage srcImage,
                                         VkImageLayout srcLayout,
                                         const GrVkResource* dstResource,
                                         VkImage dstImage,
                                         VkImageLayout dstLayout,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcResource);
    this->addResource(dstResource);
    GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
                                                srcImage,
                                                srcLayout,
                                                dstImage,
                                                dstLayout,
                                                blitRegionCount,
                                                blitRegions,
                                                filter));
}

void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
                                         const GrVkImage& srcImage,
                                         const GrVkImage& dstImage,
                                         uint32_t blitRegionCount,
                                         const VkImageBlit* blitRegions,
                                         VkFilter filter) {
    this->blitImage(gpu,
                    srcImage.resource(),
                    srcImage.image(),
                    srcImage.currentLayout(),
                    dstImage.resource(),
                    dstImage.image(),
                    dstImage.currentLayout(),
                    blitRegionCount,
                    blitRegions,
                    filter);
}

void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
                                                 GrVkImage* srcImage,
                                                 VkImageLayout srcLayout,
                                                 GrVkTransferBuffer* dstBuffer,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcImage->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
                                                        srcImage->image(),
                                                        srcLayout,
                                                        dstBuffer->buffer(),
                                                        copyRegionCount,
                                                        copyRegions));
}

void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
                                                 GrVkTransferBuffer* srcBuffer,
                                                 GrVkImage* dstImage,
                                                 VkImageLayout dstLayout,
                                                 uint32_t copyRegionCount,
                                                 const VkBufferImageCopy* copyRegions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(srcBuffer->resource());
    this->addResource(dstImage->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
                                                        srcBuffer->buffer(),
                                                        dstImage->image(),
                                                        dstLayout,
                                                        copyRegionCount,
                                                        copyRegions));
}

void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
                                          GrVkBuffer* srcBuffer,
                                          GrVkBuffer* dstBuffer,
                                          uint32_t regionCount,
                                          const VkBufferCopy* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
#ifdef SK_DEBUG
    for (uint32_t i = 0; i < regionCount; ++i) {
        const VkBufferCopy& region = regions[i];
        SkASSERT(region.size > 0);
        SkASSERT(region.srcOffset < srcBuffer->size());
        SkASSERT(region.dstOffset < dstBuffer->size());
        SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
        SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
    }
#endif
    this->addResource(srcBuffer->resource());
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
                                                 srcBuffer->buffer(),
                                                 dstBuffer->buffer(),
                                                 regionCount,
                                                 regions));
}

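// Note: vkCmdUpdateBuffer is only suitable for small inline transfers. The
// Vulkan spec requires dataSize to be at most 65536 bytes and both dstOffset
// and dataSize to be multiples of 4, which the asserts below enforce.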
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03));  // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03));   // four byte aligned
    this->addingWork(gpu);
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}

void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}

void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    this->addingWork(gpu);
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}

void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    this->addingWork(gpu);
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}

void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any.
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
    for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
        buffer->unref(gpu);
    }
}

void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
    SkASSERT(!fActiveRenderPass);
    for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
        buffer->unrefAndAbandon();
    }
}

////////////////////////////////////////////////////////////////////////////////
// SecondaryCommandBuffer
////////////////////////////////////////////////////////////////////////////////

GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(const GrVkGpu* gpu,
                                                               GrVkCommandPool* cmdPool) {
    SkASSERT(cmdPool);
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_SECONDARY,                // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
                                                                         &cmdInfo,
                                                                         &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkSecondaryCommandBuffer(cmdBuffer, cmdPool);
}

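// This overload wraps an externally allocated VkCommandBuffer. Wrapped buffers
// have no command pool (isWrapped() is true), so begin()/end() skip the Vulkan
// begin/end calls and freeGPUData() does not free the handle; the external
// owner manages the command buffer's lifetime.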
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, nullptr);
}

void GrVkSecondaryCommandBuffer::begin(const GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    if (!this->isWrapped()) {
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0;  // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
                                                                   &cmdBufferBeginInfo));
    }
    fIsActive = true;
}

void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    if (!this->isWrapped()) {
        GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
    }
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}