blob: 191bfc9d05ab8dd943a95057b9ccec716efd7e56 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#include "GrVkCommandBuffer.h"
9
Ethan Nicholas8e265a72018-12-12 16:22:40 -050010#include "GrVkCommandPool.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000011#include "GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050012#include "GrVkFramebuffer.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000013#include "GrVkImage.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050014#include "GrVkImageView.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000015#include "GrVkIndexBuffer.h"
egdaniel470d77a2016-03-18 12:50:27 -070016#include "GrVkPipeline.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000017#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050018#include "GrVkRenderPass.h"
19#include "GrVkRenderTarget.h"
Greg Daniel7d918fd2018-06-19 15:22:01 -040020#include "GrVkPipelineLayout.h"
egdaniel22281c12016-03-23 13:49:40 -070021#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050022#include "GrVkTransferBuffer.h"
23#include "GrVkUtil.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000024#include "GrVkVertexBuffer.h"
egdaniel9cb63402016-06-23 08:37:05 -070025#include "SkRect.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050026
27void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060028 for (auto& boundInputBuffer : fBoundInputBuffers) {
29 boundInputBuffer = VK_NULL_HANDLE;
30 }
egdaniel470d77a2016-03-18 12:50:27 -070031 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070032
33 memset(&fCachedViewport, 0, sizeof(VkViewport));
34 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
35
36 memset(&fCachedScissor, 0, sizeof(VkRect2D));
37 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
38
39 for (int i = 0; i < 4; ++i) {
40 fCachedBlendConstant[i] = -1.0;
41 }
Greg Daniel164a9f02016-02-22 09:56:40 -050042}
43
Ethan Nicholas8e265a72018-12-12 16:22:40 -050044void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu) const {
Greg Daniel164a9f02016-02-22 09:56:40 -050045 SkASSERT(!fIsActive);
Greg Daniel164a9f02016-02-22 09:56:40 -050046 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050047 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel164a9f02016-02-22 09:56:40 -050048 fTrackedResources[i]->unref(gpu);
49 }
halcanary9d524f22016-03-29 09:03:52 -070050
egdanielc1be9bc2016-07-20 08:33:00 -070051 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050052 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070053 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
54 }
55
Greg Daniel7d918fd2018-06-19 15:22:01 -040056 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050057 fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel7d918fd2018-06-19 15:22:01 -040058 fTrackedRecordingResources[i]->unref(gpu);
59 }
60
Greg Daniel070cbaf2019-01-03 17:35:54 -050061 if (!this->isWrapped()) {
62 GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), fCmdPool->vkCommandPool(),
63 1, &fCmdBuffer));
64 }
egdaniel9cb63402016-06-23 08:37:05 -070065
66 this->onFreeGPUData(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050067}
68
void GrVkCommandBuffer::abandonGPUData() const {
    // Releases bookkeeping refs when the context is abandoned. No Vulkan objects are touched
    // (the device may already be lost), so resources are abandoned rather than recycled/freed.
    SkDEBUGCODE(fResourcesReleased = true;)
    for (int i = 0; i < fTrackedResources.count(); ++i) {
        fTrackedResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
        fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
        // We don't recycle resources when abandoning them.
        fTrackedRecycledResources[i]->unrefAndAbandon();
    }

    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
        fTrackedRecordingResources[i]->unrefAndAbandon();
    }

    this->onAbandonGPUData();
}
89
Ethan Nicholas8e265a72018-12-12 16:22:40 -050090void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
91 SkDEBUGCODE(fResourcesReleased = true;)
jvanverth7ec92412016-07-06 09:24:57 -070092 SkASSERT(!fIsActive);
93 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050094 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
jvanverth7ec92412016-07-06 09:24:57 -070095 fTrackedResources[i]->unref(gpu);
96 }
egdanielc1be9bc2016-07-20 08:33:00 -070097 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050098 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070099 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
100 }
egdaniel594739c2016-09-20 12:39:25 -0700101
Greg Daniel7d918fd2018-06-19 15:22:01 -0400102 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -0500103 fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400104 fTrackedRecordingResources[i]->unref(gpu);
105 }
106
egdaniel594739c2016-09-20 12:39:25 -0700107 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
108 fTrackedResources.reset();
109 fTrackedRecycledResources.reset();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400110 fTrackedRecordingResources.reset();
egdaniel594739c2016-09-20 12:39:25 -0700111 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
112 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
Greg Daniel7d918fd2018-06-19 15:22:01 -0400113 fTrackedRecordingResources.setReserve(kInitialTrackedResourcesCount);
egdaniel594739c2016-09-20 12:39:25 -0700114 fNumResets = 0;
115 } else {
116 fTrackedResources.rewind();
117 fTrackedRecycledResources.rewind();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400118 fTrackedRecordingResources.rewind();
egdaniel594739c2016-09-20 12:39:25 -0700119 }
120
jvanverth7ec92412016-07-06 09:24:57 -0700121 this->invalidateState();
122
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500123 this->onReleaseResources(gpu);
jvanverth7ec92412016-07-06 09:24:57 -0700124}
125
Greg Daniel164a9f02016-02-22 09:56:40 -0500126////////////////////////////////////////////////////////////////////////////////
127// CommandBuffer commands
128////////////////////////////////////////////////////////////////////////////////
129
130void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Greg Daniel59dc1482019-02-22 10:46:38 -0500131 const GrVkResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -0500132 VkPipelineStageFlags srcStageMask,
133 VkPipelineStageFlags dstStageMask,
134 bool byRegion,
135 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -0500136 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500137 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -0500138 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -0700139 // For images we can have barriers inside of render passes but they require us to add more
140 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
141 // never have buffer barriers inside of a render pass. For now we will just assert that we are
142 // not in a render pass.
143 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400144
145 this->addingWork();
146
Greg Daniel164a9f02016-02-22 09:56:40 -0500147 VkDependencyFlags dependencyFlags = byRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
148
149 switch (barrierType) {
150 case kMemory_BarrierType: {
151 const VkMemoryBarrier* barrierPtr = reinterpret_cast<VkMemoryBarrier*>(barrier);
152 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
153 dstStageMask, dependencyFlags,
154 1, barrierPtr,
155 0, nullptr,
156 0, nullptr));
157 break;
158 }
159
160 case kBufferMemory_BarrierType: {
161 const VkBufferMemoryBarrier* barrierPtr =
162 reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
163 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
164 dstStageMask, dependencyFlags,
165 0, nullptr,
166 1, barrierPtr,
167 0, nullptr));
168 break;
169 }
170
171 case kImageMemory_BarrierType: {
172 const VkImageMemoryBarrier* barrierPtr =
173 reinterpret_cast<VkImageMemoryBarrier*>(barrier);
174 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
175 dstStageMask, dependencyFlags,
176 0, nullptr,
177 0, nullptr,
178 1, barrierPtr));
179 break;
180 }
181 }
Greg Daniel59dc1482019-02-22 10:46:38 -0500182 if (resource) {
183 this->addResource(resource);
184 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500185}
186
Greg Daniel6ecc9112017-06-16 16:17:03 +0000187void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
188 const GrVkVertexBuffer* vbuffer) {
189 VkBuffer vkBuffer = vbuffer->buffer();
190 SkASSERT(VK_NULL_HANDLE != vkBuffer);
191 SkASSERT(binding < kMaxInputBuffers);
192 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
193 // to know if we can skip binding or not.
194 if (vkBuffer != fBoundInputBuffers[binding]) {
195 VkDeviceSize offset = vbuffer->offset();
196 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
197 binding,
198 1,
199 &vkBuffer,
200 &offset));
201 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500202 this->addResource(vbuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000203 }
204}
205
206void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
207 VkBuffer vkBuffer = ibuffer->buffer();
208 SkASSERT(VK_NULL_HANDLE != vkBuffer);
209 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
210 // to know if we can skip binding or not.
211 if (vkBuffer != fBoundIndexBuffer) {
212 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
213 vkBuffer,
214 ibuffer->offset(),
215 VK_INDEX_TYPE_UINT16));
216 fBoundIndexBuffer = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500217 this->addResource(ibuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000218 }
219}
220
Greg Daniel164a9f02016-02-22 09:56:40 -0500221void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
222 int numAttachments,
223 const VkClearAttachment* attachments,
224 int numRects,
Greg Danielf346df32019-04-03 14:52:13 -0400225 const VkClearRect* clearRects) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500226 SkASSERT(fIsActive);
227 SkASSERT(fActiveRenderPass);
228 SkASSERT(numAttachments > 0);
229 SkASSERT(numRects > 0);
Greg Danielf346df32019-04-03 14:52:13 -0400230
231 this->addingWork();
232
Greg Daniel164a9f02016-02-22 09:56:40 -0500233#ifdef SK_DEBUG
234 for (int i = 0; i < numAttachments; ++i) {
235 if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
236 uint32_t testIndex;
237 SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
238 SkASSERT(testIndex == attachments[i].colorAttachment);
239 }
240 }
241#endif
242 GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
243 numAttachments,
244 attachments,
245 numRects,
246 clearRects));
247}
248
void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
                                           GrVkPipelineState* pipelineState,
                                           GrVkPipelineLayout* layout,
                                           uint32_t firstSet,
                                           uint32_t setCount,
                                           const VkDescriptorSet* descriptorSets,
                                           uint32_t dynamicOffsetCount,
                                           const uint32_t* dynamicOffsets) {
    // Binds graphics descriptor sets and keeps 'layout' alive for the duration of recording.
    SkASSERT(fIsActive);
    GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
                                                         VK_PIPELINE_BIND_POINT_GRAPHICS,
                                                         layout->layout(),
                                                         firstSet,
                                                         setCount,
                                                         descriptorSets,
                                                         dynamicOffsetCount,
                                                         dynamicOffsets));
    this->addRecordingResource(layout);
}
268
egdanielbc9b2962016-09-27 08:00:53 -0700269void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
270 const SkTArray<const GrVkRecycledResource*>& recycled,
271 const SkTArray<const GrVkResource*>& resources,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400272 GrVkPipelineLayout* layout,
egdanielbc9b2962016-09-27 08:00:53 -0700273 uint32_t firstSet,
274 uint32_t setCount,
275 const VkDescriptorSet* descriptorSets,
276 uint32_t dynamicOffsetCount,
277 const uint32_t* dynamicOffsets) {
278 SkASSERT(fIsActive);
279 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
280 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400281 layout->layout(),
egdanielbc9b2962016-09-27 08:00:53 -0700282 firstSet,
283 setCount,
284 descriptorSets,
285 dynamicOffsetCount,
286 dynamicOffsets));
Greg Daniel7d918fd2018-06-19 15:22:01 -0400287 this->addRecordingResource(layout);
egdanielbc9b2962016-09-27 08:00:53 -0700288 for (int i = 0; i < recycled.count(); ++i) {
289 this->addRecycledResource(recycled[i]);
290 }
291 for (int i = 0; i < resources.count(); ++i) {
292 this->addResource(resources[i]);
293 }
294}
295
egdaniel470d77a2016-03-18 12:50:27 -0700296void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
297 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700298 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
299 VK_PIPELINE_BIND_POINT_GRAPHICS,
300 pipeline->pipeline()));
egdanielec440992016-09-13 09:54:11 -0700301 this->addResource(pipeline);
egdaniel470d77a2016-03-18 12:50:27 -0700302}
303
Greg Daniel164a9f02016-02-22 09:56:40 -0500304void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
305 uint32_t indexCount,
306 uint32_t instanceCount,
307 uint32_t firstIndex,
308 int32_t vertexOffset,
Greg Danielf346df32019-04-03 14:52:13 -0400309 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500310 SkASSERT(fIsActive);
311 SkASSERT(fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400312 this->addingWork();
Greg Daniel164a9f02016-02-22 09:56:40 -0500313 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
314 indexCount,
315 instanceCount,
316 firstIndex,
317 vertexOffset,
318 firstInstance));
319}
320
321void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
322 uint32_t vertexCount,
323 uint32_t instanceCount,
324 uint32_t firstVertex,
Greg Danielf346df32019-04-03 14:52:13 -0400325 uint32_t firstInstance) {
Greg Daniel164a9f02016-02-22 09:56:40 -0500326 SkASSERT(fIsActive);
327 SkASSERT(fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400328 this->addingWork();
Greg Daniel164a9f02016-02-22 09:56:40 -0500329 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
330 vertexCount,
331 instanceCount,
332 firstVertex,
333 firstInstance));
334}
egdaniel470d77a2016-03-18 12:50:27 -0700335
336void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
337 uint32_t firstViewport,
338 uint32_t viewportCount,
339 const VkViewport* viewports) {
340 SkASSERT(fIsActive);
341 SkASSERT(1 == viewportCount);
342 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
343 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
344 firstViewport,
345 viewportCount,
346 viewports));
347 fCachedViewport = viewports[0];
348 }
349}
350
351void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
352 uint32_t firstScissor,
353 uint32_t scissorCount,
354 const VkRect2D* scissors) {
355 SkASSERT(fIsActive);
356 SkASSERT(1 == scissorCount);
357 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
358 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
359 firstScissor,
360 scissorCount,
361 scissors));
362 fCachedScissor = scissors[0];
363 }
364}
365
366void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
367 const float blendConstants[4]) {
368 SkASSERT(fIsActive);
369 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
370 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
371 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
372 }
373}
egdaniel9a6cf802016-06-08 08:22:05 -0700374
375///////////////////////////////////////////////////////////////////////////////
376// PrimaryCommandBuffer
377////////////////////////////////////////////////////////////////////////////////
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}
382
GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
                                                           GrVkCommandPool* cmdPool) {
    // Allocates a single primary-level VkCommandBuffer out of 'cmdPool'.
    // Returns nullptr if the Vulkan allocation fails.
    const VkCommandBufferAllocateInfo cmdInfo = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,   // sType
        nullptr,                                          // pNext
        cmdPool->vkCommandPool(),                         // commandPool
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                  // level
        1                                                 // bufferCount
    };

    VkCommandBuffer cmdBuffer;
    VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
                                                                         &cmdInfo,
                                                                         &cmdBuffer));
    if (err) {
        return nullptr;
    }
    return new GrVkPrimaryCommandBuffer(cmdBuffer, cmdPool);
}
402
403void GrVkPrimaryCommandBuffer::begin(const GrVkGpu* gpu) {
404 SkASSERT(!fIsActive);
405 VkCommandBufferBeginInfo cmdBufferBeginInfo;
406 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
407 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
408 cmdBufferBeginInfo.pNext = nullptr;
409 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
410 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
411
412 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
413 &cmdBufferBeginInfo));
414 fIsActive = true;
415}
416
void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
    // Finishes recording: closes the command buffer, drops recording-only refs, and clears
    // cached state so the next begin() starts fresh.
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
    for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
        fTrackedRecordingResources[i]->unref(gpu);
    }
    fTrackedRecordingResources.rewind();
    this->invalidateState();
    fIsActive = false;
    fHasWork = false;
}
429
void GrVkPrimaryCommandBuffer::beginRenderPass(const GrVkGpu* gpu,
                                               const GrVkRenderPass* renderPass,
                                               const VkClearValue clearValues[],
                                               const GrVkRenderTarget& target,
                                               const SkIRect& bounds,
                                               bool forSecondaryCB) {
    // Begins 'renderPass' targeting 'target' over 'bounds'. When forSecondaryCB is true the
    // pass contents will come from secondary command buffers (executeCommands) rather than
    // inline commands. Refs the render pass and the target's resources for the GPU's lifetime.
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(renderPass->isCompatible(target));

    this->addingWork();

    VkRenderPassBeginInfo beginInfo;
    VkRect2D renderArea;
    renderArea.offset = { bounds.fLeft , bounds.fTop };
    renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };

    memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
    beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.renderPass = renderPass->vkRenderPass();
    beginInfo.framebuffer = target.framebuffer()->framebuffer();
    beginInfo.renderArea = renderArea;
    beginInfo.clearValueCount = renderPass->clearValueCount();
    beginInfo.pClearValues = clearValues;

    VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
                                                : VK_SUBPASS_CONTENTS_INLINE;

    GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
    fActiveRenderPass = renderPass;
    this->addResource(renderPass);
    target.addResources(*this);
}
464
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    // Ends the currently active render pass begun by beginRenderPass().
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    this->addingWork();
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
472
void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               GrVkSecondaryCommandBuffer* buffer) {
    // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
    // if the command pools both were created from were created with the same queue family. However,
    // we currently always create them from the same pool.
    SkASSERT(buffer->commandPool() == fCmdPool);
    SkASSERT(fIsActive);
    SkASSERT(!buffer->fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    this->addingWork();

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    // Hold a ref on the secondary buffer until this primary buffer is released/recycled.
    buffer->ref();
    fSecondaryCommandBuffers.push_back(buffer);
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}
493
Greg Daniel48661b82018-01-22 16:11:35 -0500494static void submit_to_queue(const GrVkInterface* interface,
495 VkQueue queue,
496 VkFence fence,
497 uint32_t waitCount,
498 const VkSemaphore* waitSemaphores,
499 const VkPipelineStageFlags* waitStages,
500 uint32_t commandBufferCount,
501 const VkCommandBuffer* commandBuffers,
502 uint32_t signalCount,
503 const VkSemaphore* signalSemaphores) {
504 VkSubmitInfo submitInfo;
505 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
506 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
507 submitInfo.pNext = nullptr;
508 submitInfo.waitSemaphoreCount = waitCount;
509 submitInfo.pWaitSemaphores = waitSemaphores;
510 submitInfo.pWaitDstStageMask = waitStages;
511 submitInfo.commandBufferCount = commandBufferCount;
512 submitInfo.pCommandBuffers = commandBuffers;
513 submitInfo.signalSemaphoreCount = signalCount;
514 submitInfo.pSignalSemaphores = signalSemaphores;
515 GR_VK_CALL_ERRCHECK(interface, QueueSubmit(queue, 1, &submitInfo, fence));
516}
517
void GrVkPrimaryCommandBuffer::submitToQueue(
        const GrVkGpu* gpu,
        VkQueue queue,
        GrVkGpu::SyncQueue sync,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    // Submits this (already ended) command buffer to 'queue', waiting on / signaling the given
    // semaphores. Creates or resets fSubmitFence so finished() can poll completion; when
    // sync == kForce_SyncQueue, blocks on the fence before returning.
    SkASSERT(!fIsActive);

    VkResult err;
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                         &fSubmitFence));
        SkASSERT(!err);
    } else {
        // Reuse the fence from a previous submission.
        GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence, 0, nullptr, nullptr,
                        1, &fCmdBuffer, 0, nullptr);
    } else {
        // The shouldSignal/shouldWait queries and the actual submit must happen atomically
        // with respect to other submits, so hold the shared semaphore mutex throughout.
        GrVkSemaphore::Resource::AcquireMutex();

        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence,
                        vkWaitSems.count(), vkWaitSems.begin(), vkWaitStages.begin(),
                        1, &fCmdBuffer,
                        vkSignalSems.count(), vkSignalSems.begin());
        // Since shouldSignal/Wait do not require a mutex to be held, we must make sure that we mark
        // the semaphores after we've submitted. Thus in the worst case another submit grabs the
        // mutex and then realizes it doesn't need to submit the semaphore. We will never end up
        // where a semaphore doesn't think it needs to be submitted (cause of querying
        // shouldSignal/Wait), but it should need to.
        for (int i = 0; i < signalCount; ++i) {
            signalSemaphores[i]->markAsSignaled();
        }
        for (int i = 0; i < waitCount; ++i) {
            waitSemaphores[i]->markAsWaited();
        }

        GrVkSemaphore::Resource::ReleaseMutex();
    }

    if (GrVkGpu::kForce_SyncQueue == sync) {
        err = GR_VK_CALL(gpu->vkInterface(),
                         WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
        if (VK_TIMEOUT == err) {
            SkDebugf("Fence failed to signal: %d\n", err);
            SK_ABORT("failing");
        }
        SkASSERT(!err);

        // Destroy the fence
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
    }
}
599
600bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) const {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500601 SkASSERT(!fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700602 if (VK_NULL_HANDLE == fSubmitFence) {
603 return true;
604 }
605
606 VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
607 switch (err) {
608 case VK_SUCCESS:
609 return true;
610
611 case VK_NOT_READY:
612 return false;
613
614 default:
615 SkDebugf("Error getting fence status: %d\n", err);
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400616 SK_ABORT("failing");
egdaniel9a6cf802016-06-08 08:22:05 -0700617 break;
618 }
619
620 return false;
621}
622
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500623void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
jvanverth7ec92412016-07-06 09:24:57 -0700624 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500625 fSecondaryCommandBuffers[i]->releaseResources(gpu);
626 }
627}
628
629void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers() {
630 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
631 SkASSERT(fSecondaryCommandBuffers[i]->commandPool() == fCmdPool);
632 fCmdPool->recycleSecondaryCommandBuffer(fSecondaryCommandBuffers[i]);
jvanverth7ec92412016-07-06 09:24:57 -0700633 }
634 fSecondaryCommandBuffers.reset();
635}
636
egdaniel9a6cf802016-06-08 08:22:05 -0700637void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
638 GrVkImage* srcImage,
639 VkImageLayout srcLayout,
640 GrVkImage* dstImage,
641 VkImageLayout dstLayout,
642 uint32_t copyRegionCount,
643 const VkImageCopy* copyRegions) {
644 SkASSERT(fIsActive);
645 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400646 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700647 this->addResource(srcImage->resource());
648 this->addResource(dstImage->resource());
649 GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
650 srcImage->image(),
651 srcLayout,
652 dstImage->image(),
653 dstLayout,
654 copyRegionCount,
655 copyRegions));
656}
657
658void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
659 const GrVkResource* srcResource,
660 VkImage srcImage,
661 VkImageLayout srcLayout,
662 const GrVkResource* dstResource,
663 VkImage dstImage,
664 VkImageLayout dstLayout,
665 uint32_t blitRegionCount,
666 const VkImageBlit* blitRegions,
667 VkFilter filter) {
668 SkASSERT(fIsActive);
669 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400670 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700671 this->addResource(srcResource);
672 this->addResource(dstResource);
673 GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
674 srcImage,
675 srcLayout,
676 dstImage,
677 dstLayout,
678 blitRegionCount,
679 blitRegions,
680 filter));
681}
682
Greg Daniel6ecc9112017-06-16 16:17:03 +0000683void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
684 const GrVkImage& srcImage,
685 const GrVkImage& dstImage,
686 uint32_t blitRegionCount,
687 const VkImageBlit* blitRegions,
688 VkFilter filter) {
689 this->blitImage(gpu,
690 srcImage.resource(),
691 srcImage.image(),
692 srcImage.currentLayout(),
693 dstImage.resource(),
694 dstImage.image(),
695 dstImage.currentLayout(),
696 blitRegionCount,
697 blitRegions,
698 filter);
699}
700
701
egdaniel9a6cf802016-06-08 08:22:05 -0700702void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
703 GrVkImage* srcImage,
704 VkImageLayout srcLayout,
705 GrVkTransferBuffer* dstBuffer,
706 uint32_t copyRegionCount,
707 const VkBufferImageCopy* copyRegions) {
708 SkASSERT(fIsActive);
709 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400710 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700711 this->addResource(srcImage->resource());
712 this->addResource(dstBuffer->resource());
713 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
714 srcImage->image(),
715 srcLayout,
716 dstBuffer->buffer(),
717 copyRegionCount,
718 copyRegions));
719}
720
721void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
722 GrVkTransferBuffer* srcBuffer,
723 GrVkImage* dstImage,
724 VkImageLayout dstLayout,
725 uint32_t copyRegionCount,
726 const VkBufferImageCopy* copyRegions) {
727 SkASSERT(fIsActive);
728 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400729 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700730 this->addResource(srcBuffer->resource());
731 this->addResource(dstImage->resource());
732 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
733 srcBuffer->buffer(),
734 dstImage->image(),
735 dstLayout,
736 copyRegionCount,
737 copyRegions));
738}
739
Greg Daniel6888c0d2017-08-25 11:55:50 -0400740
741void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
742 GrVkBuffer* srcBuffer,
743 GrVkBuffer* dstBuffer,
744 uint32_t regionCount,
745 const VkBufferCopy* regions) {
746 SkASSERT(fIsActive);
747 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400748 this->addingWork();
Greg Daniel6888c0d2017-08-25 11:55:50 -0400749#ifdef SK_DEBUG
750 for (uint32_t i = 0; i < regionCount; ++i) {
751 const VkBufferCopy& region = regions[i];
752 SkASSERT(region.size > 0);
753 SkASSERT(region.srcOffset < srcBuffer->size());
754 SkASSERT(region.dstOffset < dstBuffer->size());
755 SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
756 SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
757 }
758#endif
759 this->addResource(srcBuffer->resource());
760 this->addResource(dstBuffer->resource());
761 GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
762 srcBuffer->buffer(),
763 dstBuffer->buffer(),
764 regionCount,
765 regions));
766}
767
jvanvertha584de92016-06-30 09:10:52 -0700768void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
769 GrVkBuffer* dstBuffer,
770 VkDeviceSize dstOffset,
771 VkDeviceSize dataSize,
772 const void* data) {
773 SkASSERT(fIsActive);
774 SkASSERT(!fActiveRenderPass);
775 SkASSERT(0 == (dstOffset & 0x03)); // four byte aligned
776 // TODO: handle larger transfer sizes
777 SkASSERT(dataSize <= 65536);
778 SkASSERT(0 == (dataSize & 0x03)); // four byte aligned
Greg Danielf346df32019-04-03 14:52:13 -0400779 this->addingWork();
jvanvertha584de92016-06-30 09:10:52 -0700780 this->addResource(dstBuffer->resource());
781 GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
782 dstBuffer->buffer(),
783 dstOffset,
784 dataSize,
785 (const uint32_t*) data));
786}
787
egdaniel9a6cf802016-06-08 08:22:05 -0700788void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
789 GrVkImage* image,
790 const VkClearColorValue* color,
791 uint32_t subRangeCount,
792 const VkImageSubresourceRange* subRanges) {
793 SkASSERT(fIsActive);
794 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400795 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700796 this->addResource(image->resource());
797 GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
798 image->image(),
799 image->currentLayout(),
800 color,
801 subRangeCount,
802 subRanges));
803}
804
805void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
806 GrVkImage* image,
807 const VkClearDepthStencilValue* color,
808 uint32_t subRangeCount,
809 const VkImageSubresourceRange* subRanges) {
810 SkASSERT(fIsActive);
811 SkASSERT(!fActiveRenderPass);
Greg Danielf346df32019-04-03 14:52:13 -0400812 this->addingWork();
egdaniel9a6cf802016-06-08 08:22:05 -0700813 this->addResource(image->resource());
814 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
815 image->image(),
816 image->currentLayout(),
817 color,
818 subRangeCount,
819 subRanges));
820}
821
egdaniel52ad2512016-08-04 12:50:01 -0700822void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
823 const GrVkImage& srcImage,
824 const GrVkImage& dstImage,
825 uint32_t regionCount,
826 const VkImageResolve* regions) {
827 SkASSERT(fIsActive);
828 SkASSERT(!fActiveRenderPass);
829
Greg Danielf346df32019-04-03 14:52:13 -0400830 this->addingWork();
egdaniel52ad2512016-08-04 12:50:01 -0700831 this->addResource(srcImage.resource());
832 this->addResource(dstImage.resource());
833
834 GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
835 srcImage.image(),
836 srcImage.currentLayout(),
837 dstImage.image(),
838 dstImage.currentLayout(),
839 regionCount,
840 regions));
841}
842
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500843void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
egdaniel9cb63402016-06-23 08:37:05 -0700844 SkASSERT(!fActiveRenderPass);
845 // Destroy the fence, if any
846 if (VK_NULL_HANDLE != fSubmitFence) {
847 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
848 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500849 for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
850 buffer->unref(gpu);
851 }
852}
853
854void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
855 SkASSERT(!fActiveRenderPass);
856 for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
857 buffer->unrefAndAbandon();
858 }
egdaniel9cb63402016-06-23 08:37:05 -0700859}
860
egdaniel9a6cf802016-06-08 08:22:05 -0700861///////////////////////////////////////////////////////////////////////////////
862// SecondaryCommandBuffer
863////////////////////////////////////////////////////////////////////////////////
864
jvanverth7ec92412016-07-06 09:24:57 -0700865GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500866 GrVkCommandPool* cmdPool) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500867 SkASSERT(cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700868 const VkCommandBufferAllocateInfo cmdInfo = {
869 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400870 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500871 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700872 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
873 1 // bufferCount
874 };
875
876 VkCommandBuffer cmdBuffer;
877 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
878 &cmdInfo,
879 &cmdBuffer));
880 if (err) {
881 return nullptr;
882 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500883 return new GrVkSecondaryCommandBuffer(cmdBuffer, cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700884}
885
Greg Daniel070cbaf2019-01-03 17:35:54 -0500886GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
887 return new GrVkSecondaryCommandBuffer(cmdBuffer, nullptr);
888}
egdaniel9a6cf802016-06-08 08:22:05 -0700889
jvanverth7ec92412016-07-06 09:24:57 -0700890void GrVkSecondaryCommandBuffer::begin(const GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
891 const GrVkRenderPass* compatibleRenderPass) {
egdaniel9a6cf802016-06-08 08:22:05 -0700892 SkASSERT(!fIsActive);
jvanverth7ec92412016-07-06 09:24:57 -0700893 SkASSERT(compatibleRenderPass);
894 fActiveRenderPass = compatibleRenderPass;
egdaniel9a6cf802016-06-08 08:22:05 -0700895
Greg Daniel070cbaf2019-01-03 17:35:54 -0500896 if (!this->isWrapped()) {
897 VkCommandBufferInheritanceInfo inheritanceInfo;
898 memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
899 inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
900 inheritanceInfo.pNext = nullptr;
901 inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
902 inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
903 inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
904 inheritanceInfo.occlusionQueryEnable = false;
905 inheritanceInfo.queryFlags = 0;
906 inheritanceInfo.pipelineStatistics = 0;
egdaniel9a6cf802016-06-08 08:22:05 -0700907
Greg Daniel070cbaf2019-01-03 17:35:54 -0500908 VkCommandBufferBeginInfo cmdBufferBeginInfo;
909 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
910 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
911 cmdBufferBeginInfo.pNext = nullptr;
912 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
913 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
914 cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;
egdaniel9a6cf802016-06-08 08:22:05 -0700915
Greg Daniel070cbaf2019-01-03 17:35:54 -0500916 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
917 &cmdBufferBeginInfo));
918 }
egdaniel9a6cf802016-06-08 08:22:05 -0700919 fIsActive = true;
920}
921
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500922void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700923 SkASSERT(fIsActive);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500924 if (!this->isWrapped()) {
925 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
926 }
egdaniel9a6cf802016-06-08 08:22:05 -0700927 this->invalidateState();
928 fIsActive = false;
Robert Phillips04d2ce22019-04-03 13:20:43 -0400929 fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700930}