blob: a96eef1245613c94311672393b00061f361e41fc [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#include "GrVkCommandBuffer.h"
9
Ethan Nicholas8e265a72018-12-12 16:22:40 -050010#include "GrVkCommandPool.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000011#include "GrVkGpu.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050012#include "GrVkFramebuffer.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000013#include "GrVkImage.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050014#include "GrVkImageView.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000015#include "GrVkIndexBuffer.h"
egdaniel470d77a2016-03-18 12:50:27 -070016#include "GrVkPipeline.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000017#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050018#include "GrVkRenderPass.h"
19#include "GrVkRenderTarget.h"
Greg Daniel7d918fd2018-06-19 15:22:01 -040020#include "GrVkPipelineLayout.h"
egdaniel22281c12016-03-23 13:49:40 -070021#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050022#include "GrVkTransferBuffer.h"
23#include "GrVkUtil.h"
Greg Daniel6ecc9112017-06-16 16:17:03 +000024#include "GrVkVertexBuffer.h"
egdaniel9cb63402016-06-23 08:37:05 -070025#include "SkRect.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050026
27void GrVkCommandBuffer::invalidateState() {
Chris Dalton1d616352017-05-31 12:51:23 -060028 for (auto& boundInputBuffer : fBoundInputBuffers) {
29 boundInputBuffer = VK_NULL_HANDLE;
30 }
egdaniel470d77a2016-03-18 12:50:27 -070031 fBoundIndexBuffer = VK_NULL_HANDLE;
egdaniel470d77a2016-03-18 12:50:27 -070032
33 memset(&fCachedViewport, 0, sizeof(VkViewport));
34 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
35
36 memset(&fCachedScissor, 0, sizeof(VkRect2D));
37 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
38
39 for (int i = 0; i < 4; ++i) {
40 fCachedBlendConstant[i] = -1.0;
41 }
Greg Daniel164a9f02016-02-22 09:56:40 -050042}
43
Ethan Nicholas8e265a72018-12-12 16:22:40 -050044void GrVkCommandBuffer::freeGPUData(GrVkGpu* gpu) const {
Greg Daniel164a9f02016-02-22 09:56:40 -050045 SkASSERT(!fIsActive);
Greg Daniel164a9f02016-02-22 09:56:40 -050046 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050047 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel164a9f02016-02-22 09:56:40 -050048 fTrackedResources[i]->unref(gpu);
49 }
halcanary9d524f22016-03-29 09:03:52 -070050
egdanielc1be9bc2016-07-20 08:33:00 -070051 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050052 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070053 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
54 }
55
Greg Daniel7d918fd2018-06-19 15:22:01 -040056 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050057 fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel7d918fd2018-06-19 15:22:01 -040058 fTrackedRecordingResources[i]->unref(gpu);
59 }
60
Greg Daniel070cbaf2019-01-03 17:35:54 -050061 if (!this->isWrapped()) {
62 GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), fCmdPool->vkCommandPool(),
63 1, &fCmdBuffer));
64 }
egdaniel9cb63402016-06-23 08:37:05 -070065
66 this->onFreeGPUData(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050067}
68
Greg Danielcef213c2017-04-21 11:52:27 -040069void GrVkCommandBuffer::abandonGPUData() const {
Ethan Nicholas8e265a72018-12-12 16:22:40 -050070 SkDEBUGCODE(fResourcesReleased = true;)
Greg Daniel164a9f02016-02-22 09:56:40 -050071 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050072 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel164a9f02016-02-22 09:56:40 -050073 fTrackedResources[i]->unrefAndAbandon();
74 }
egdanielc1be9bc2016-07-20 08:33:00 -070075
76 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050077 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070078 // We don't recycle resources when abandoning them.
79 fTrackedRecycledResources[i]->unrefAndAbandon();
80 }
Greg Daniel7d918fd2018-06-19 15:22:01 -040081
82 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050083 fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel7d918fd2018-06-19 15:22:01 -040084 fTrackedRecordingResources[i]->unrefAndAbandon();
85 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -050086
87 this->onAbandonGPUData();
Greg Daniel164a9f02016-02-22 09:56:40 -050088}
89
Ethan Nicholas8e265a72018-12-12 16:22:40 -050090void GrVkCommandBuffer::releaseResources(GrVkGpu* gpu) {
91 SkDEBUGCODE(fResourcesReleased = true;)
jvanverth7ec92412016-07-06 09:24:57 -070092 SkASSERT(!fIsActive);
93 for (int i = 0; i < fTrackedResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050094 fTrackedResources[i]->notifyRemovedFromCommandBuffer();
jvanverth7ec92412016-07-06 09:24:57 -070095 fTrackedResources[i]->unref(gpu);
96 }
egdanielc1be9bc2016-07-20 08:33:00 -070097 for (int i = 0; i < fTrackedRecycledResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -050098 fTrackedRecycledResources[i]->notifyRemovedFromCommandBuffer();
egdanielc1be9bc2016-07-20 08:33:00 -070099 fTrackedRecycledResources[i]->recycle(const_cast<GrVkGpu*>(gpu));
100 }
egdaniel594739c2016-09-20 12:39:25 -0700101
Greg Daniel7d918fd2018-06-19 15:22:01 -0400102 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
Brian Salomon614c1a82018-12-19 15:42:06 -0500103 fTrackedRecordingResources[i]->notifyRemovedFromCommandBuffer();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400104 fTrackedRecordingResources[i]->unref(gpu);
105 }
106
egdaniel594739c2016-09-20 12:39:25 -0700107 if (++fNumResets > kNumRewindResetsBeforeFullReset) {
108 fTrackedResources.reset();
109 fTrackedRecycledResources.reset();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400110 fTrackedRecordingResources.reset();
egdaniel594739c2016-09-20 12:39:25 -0700111 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
112 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
Greg Daniel7d918fd2018-06-19 15:22:01 -0400113 fTrackedRecordingResources.setReserve(kInitialTrackedResourcesCount);
egdaniel594739c2016-09-20 12:39:25 -0700114 fNumResets = 0;
115 } else {
116 fTrackedResources.rewind();
117 fTrackedRecycledResources.rewind();
Greg Daniel7d918fd2018-06-19 15:22:01 -0400118 fTrackedRecordingResources.rewind();
egdaniel594739c2016-09-20 12:39:25 -0700119 }
120
jvanverth7ec92412016-07-06 09:24:57 -0700121 this->invalidateState();
122
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500123 this->onReleaseResources(gpu);
jvanverth7ec92412016-07-06 09:24:57 -0700124}
125
Greg Daniel164a9f02016-02-22 09:56:40 -0500126////////////////////////////////////////////////////////////////////////////////
127// CommandBuffer commands
128////////////////////////////////////////////////////////////////////////////////
129
130void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
Greg Daniel59dc1482019-02-22 10:46:38 -0500131 const GrVkResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -0500132 VkPipelineStageFlags srcStageMask,
133 VkPipelineStageFlags dstStageMask,
134 bool byRegion,
135 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -0500136 void* barrier) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500137 SkASSERT(!this->isWrapped());
Greg Daniel164a9f02016-02-22 09:56:40 -0500138 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -0700139 // For images we can have barriers inside of render passes but they require us to add more
140 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
141 // never have buffer barriers inside of a render pass. For now we will just assert that we are
142 // not in a render pass.
143 SkASSERT(!fActiveRenderPass);
Greg Daniel164a9f02016-02-22 09:56:40 -0500144 VkDependencyFlags dependencyFlags = byRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
145
146 switch (barrierType) {
147 case kMemory_BarrierType: {
148 const VkMemoryBarrier* barrierPtr = reinterpret_cast<VkMemoryBarrier*>(barrier);
149 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
150 dstStageMask, dependencyFlags,
151 1, barrierPtr,
152 0, nullptr,
153 0, nullptr));
154 break;
155 }
156
157 case kBufferMemory_BarrierType: {
158 const VkBufferMemoryBarrier* barrierPtr =
159 reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
160 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
161 dstStageMask, dependencyFlags,
162 0, nullptr,
163 1, barrierPtr,
164 0, nullptr));
165 break;
166 }
167
168 case kImageMemory_BarrierType: {
169 const VkImageMemoryBarrier* barrierPtr =
170 reinterpret_cast<VkImageMemoryBarrier*>(barrier);
171 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
172 dstStageMask, dependencyFlags,
173 0, nullptr,
174 0, nullptr,
175 1, barrierPtr));
176 break;
177 }
178 }
Greg Daniel59dc1482019-02-22 10:46:38 -0500179 if (resource) {
180 this->addResource(resource);
181 }
Greg Daniel164a9f02016-02-22 09:56:40 -0500182}
183
Greg Daniel6ecc9112017-06-16 16:17:03 +0000184void GrVkCommandBuffer::bindInputBuffer(GrVkGpu* gpu, uint32_t binding,
185 const GrVkVertexBuffer* vbuffer) {
186 VkBuffer vkBuffer = vbuffer->buffer();
187 SkASSERT(VK_NULL_HANDLE != vkBuffer);
188 SkASSERT(binding < kMaxInputBuffers);
189 // TODO: once vbuffer->offset() no longer always returns 0, we will need to track the offset
190 // to know if we can skip binding or not.
191 if (vkBuffer != fBoundInputBuffers[binding]) {
192 VkDeviceSize offset = vbuffer->offset();
193 GR_VK_CALL(gpu->vkInterface(), CmdBindVertexBuffers(fCmdBuffer,
194 binding,
195 1,
196 &vkBuffer,
197 &offset));
198 fBoundInputBuffers[binding] = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500199 this->addResource(vbuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000200 }
201}
202
203void GrVkCommandBuffer::bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer) {
204 VkBuffer vkBuffer = ibuffer->buffer();
205 SkASSERT(VK_NULL_HANDLE != vkBuffer);
206 // TODO: once ibuffer->offset() no longer always returns 0, we will need to track the offset
207 // to know if we can skip binding or not.
208 if (vkBuffer != fBoundIndexBuffer) {
209 GR_VK_CALL(gpu->vkInterface(), CmdBindIndexBuffer(fCmdBuffer,
210 vkBuffer,
211 ibuffer->offset(),
212 VK_INDEX_TYPE_UINT16));
213 fBoundIndexBuffer = vkBuffer;
Greg Daniel59dc1482019-02-22 10:46:38 -0500214 this->addResource(ibuffer->resource());
Greg Daniel6ecc9112017-06-16 16:17:03 +0000215 }
216}
217
Greg Daniel164a9f02016-02-22 09:56:40 -0500218void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
219 int numAttachments,
220 const VkClearAttachment* attachments,
221 int numRects,
222 const VkClearRect* clearRects) const {
223 SkASSERT(fIsActive);
224 SkASSERT(fActiveRenderPass);
225 SkASSERT(numAttachments > 0);
226 SkASSERT(numRects > 0);
227#ifdef SK_DEBUG
228 for (int i = 0; i < numAttachments; ++i) {
229 if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
230 uint32_t testIndex;
231 SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
232 SkASSERT(testIndex == attachments[i].colorAttachment);
233 }
234 }
235#endif
236 GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
237 numAttachments,
238 attachments,
239 numRects,
240 clearRects));
241}
242
243void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
egdaniel22281c12016-03-23 13:49:40 -0700244 GrVkPipelineState* pipelineState,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400245 GrVkPipelineLayout* layout,
Greg Daniel164a9f02016-02-22 09:56:40 -0500246 uint32_t firstSet,
247 uint32_t setCount,
248 const VkDescriptorSet* descriptorSets,
249 uint32_t dynamicOffsetCount,
250 const uint32_t* dynamicOffsets) {
251 SkASSERT(fIsActive);
252 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
253 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400254 layout->layout(),
Greg Daniel164a9f02016-02-22 09:56:40 -0500255 firstSet,
256 setCount,
257 descriptorSets,
258 dynamicOffsetCount,
259 dynamicOffsets));
Greg Daniel7d918fd2018-06-19 15:22:01 -0400260 this->addRecordingResource(layout);
Greg Daniel164a9f02016-02-22 09:56:40 -0500261}
262
egdanielbc9b2962016-09-27 08:00:53 -0700263void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
264 const SkTArray<const GrVkRecycledResource*>& recycled,
265 const SkTArray<const GrVkResource*>& resources,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400266 GrVkPipelineLayout* layout,
egdanielbc9b2962016-09-27 08:00:53 -0700267 uint32_t firstSet,
268 uint32_t setCount,
269 const VkDescriptorSet* descriptorSets,
270 uint32_t dynamicOffsetCount,
271 const uint32_t* dynamicOffsets) {
272 SkASSERT(fIsActive);
273 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
274 VK_PIPELINE_BIND_POINT_GRAPHICS,
Greg Daniel7d918fd2018-06-19 15:22:01 -0400275 layout->layout(),
egdanielbc9b2962016-09-27 08:00:53 -0700276 firstSet,
277 setCount,
278 descriptorSets,
279 dynamicOffsetCount,
280 dynamicOffsets));
Greg Daniel7d918fd2018-06-19 15:22:01 -0400281 this->addRecordingResource(layout);
egdanielbc9b2962016-09-27 08:00:53 -0700282 for (int i = 0; i < recycled.count(); ++i) {
283 this->addRecycledResource(recycled[i]);
284 }
285 for (int i = 0; i < resources.count(); ++i) {
286 this->addResource(resources[i]);
287 }
288}
289
egdaniel470d77a2016-03-18 12:50:27 -0700290void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
291 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700292 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
293 VK_PIPELINE_BIND_POINT_GRAPHICS,
294 pipeline->pipeline()));
egdanielec440992016-09-13 09:54:11 -0700295 this->addResource(pipeline);
egdaniel470d77a2016-03-18 12:50:27 -0700296}
297
Greg Daniel164a9f02016-02-22 09:56:40 -0500298void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
299 uint32_t indexCount,
300 uint32_t instanceCount,
301 uint32_t firstIndex,
302 int32_t vertexOffset,
303 uint32_t firstInstance) const {
304 SkASSERT(fIsActive);
305 SkASSERT(fActiveRenderPass);
306 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
307 indexCount,
308 instanceCount,
309 firstIndex,
310 vertexOffset,
311 firstInstance));
312}
313
314void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
315 uint32_t vertexCount,
316 uint32_t instanceCount,
317 uint32_t firstVertex,
318 uint32_t firstInstance) const {
319 SkASSERT(fIsActive);
320 SkASSERT(fActiveRenderPass);
321 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
322 vertexCount,
323 instanceCount,
324 firstVertex,
325 firstInstance));
326}
egdaniel470d77a2016-03-18 12:50:27 -0700327
328void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
329 uint32_t firstViewport,
330 uint32_t viewportCount,
331 const VkViewport* viewports) {
332 SkASSERT(fIsActive);
333 SkASSERT(1 == viewportCount);
334 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
335 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
336 firstViewport,
337 viewportCount,
338 viewports));
339 fCachedViewport = viewports[0];
340 }
341}
342
343void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
344 uint32_t firstScissor,
345 uint32_t scissorCount,
346 const VkRect2D* scissors) {
347 SkASSERT(fIsActive);
348 SkASSERT(1 == scissorCount);
349 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
350 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
351 firstScissor,
352 scissorCount,
353 scissors));
354 fCachedScissor = scissors[0];
355 }
356}
357
358void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
359 const float blendConstants[4]) {
360 SkASSERT(fIsActive);
361 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
362 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
363 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
364 }
365}
egdaniel9a6cf802016-06-08 08:22:05 -0700366
367///////////////////////////////////////////////////////////////////////////////
368// PrimaryCommandBuffer
369////////////////////////////////////////////////////////////////////////////////
egdaniel9cb63402016-06-23 08:37:05 -0700370GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
371 // Should have ended any render pass we're in the middle of
372 SkASSERT(!fActiveRenderPass);
373}
374
egdaniel9a6cf802016-06-08 08:22:05 -0700375GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500376 GrVkCommandPool* cmdPool) {
egdaniel9a6cf802016-06-08 08:22:05 -0700377 const VkCommandBufferAllocateInfo cmdInfo = {
378 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400379 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500380 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700381 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
382 1 // bufferCount
383 };
384
385 VkCommandBuffer cmdBuffer;
386 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
387 &cmdInfo,
388 &cmdBuffer));
389 if (err) {
390 return nullptr;
391 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500392 return new GrVkPrimaryCommandBuffer(cmdBuffer, cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700393}
394
395void GrVkPrimaryCommandBuffer::begin(const GrVkGpu* gpu) {
396 SkASSERT(!fIsActive);
397 VkCommandBufferBeginInfo cmdBufferBeginInfo;
398 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
399 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
400 cmdBufferBeginInfo.pNext = nullptr;
401 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
402 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
403
404 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
405 &cmdBufferBeginInfo));
406 fIsActive = true;
407}
408
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500409void GrVkPrimaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700410 SkASSERT(fIsActive);
411 SkASSERT(!fActiveRenderPass);
412 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
Greg Daniel7d918fd2018-06-19 15:22:01 -0400413 for (int i = 0; i < fTrackedRecordingResources.count(); ++i) {
414 fTrackedRecordingResources[i]->unref(gpu);
415 }
416 fTrackedRecordingResources.rewind();
egdaniel9a6cf802016-06-08 08:22:05 -0700417 this->invalidateState();
418 fIsActive = false;
419}
420
421void GrVkPrimaryCommandBuffer::beginRenderPass(const GrVkGpu* gpu,
egdaniel9cb63402016-06-23 08:37:05 -0700422 const GrVkRenderPass* renderPass,
Robert Phillips95214472017-08-08 18:00:03 -0400423 const VkClearValue clearValues[],
egdaniel9cb63402016-06-23 08:37:05 -0700424 const GrVkRenderTarget& target,
425 const SkIRect& bounds,
426 bool forSecondaryCB) {
egdaniel9a6cf802016-06-08 08:22:05 -0700427 SkASSERT(fIsActive);
428 SkASSERT(!fActiveRenderPass);
egdaniel9cb63402016-06-23 08:37:05 -0700429 SkASSERT(renderPass->isCompatible(target));
430
egdaniel9a6cf802016-06-08 08:22:05 -0700431 VkRenderPassBeginInfo beginInfo;
egdaniel9cb63402016-06-23 08:37:05 -0700432 VkRect2D renderArea;
433 renderArea.offset = { bounds.fLeft , bounds.fTop };
434 renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };
435
436 memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
437 beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
438 beginInfo.pNext = nullptr;
439 beginInfo.renderPass = renderPass->vkRenderPass();
440 beginInfo.framebuffer = target.framebuffer()->framebuffer();
441 beginInfo.renderArea = renderArea;
Greg Danielb68319a2018-02-23 16:08:28 -0500442 beginInfo.clearValueCount = renderPass->clearValueCount();
egdaniel9cb63402016-06-23 08:37:05 -0700443 beginInfo.pClearValues = clearValues;
444
445 VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
446 : VK_SUBPASS_CONTENTS_INLINE;
447
egdaniel9a6cf802016-06-08 08:22:05 -0700448 GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
449 fActiveRenderPass = renderPass;
450 this->addResource(renderPass);
451 target.addResources(*this);
452}
453
454void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
455 SkASSERT(fIsActive);
456 SkASSERT(fActiveRenderPass);
457 GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
458 fActiveRenderPass = nullptr;
459}
460
461void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
jvanverth7ec92412016-07-06 09:24:57 -0700462 GrVkSecondaryCommandBuffer* buffer) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500463 // The Vulkan spec allows secondary command buffers to be executed on a primary command buffer
464 // if the command pools both were created from were created with the same queue family. However,
465 // we currently always create them from the same pool.
466 SkASSERT(buffer->commandPool() == fCmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700467 SkASSERT(fIsActive);
Greg Daniel77b53f62016-10-18 11:48:51 -0400468 SkASSERT(!buffer->fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700469 SkASSERT(fActiveRenderPass);
470 SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));
471
472 GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
jvanverth7ec92412016-07-06 09:24:57 -0700473 buffer->ref();
474 fSecondaryCommandBuffers.push_back(buffer);
egdaniel066df7c2016-06-08 14:02:27 -0700475 // When executing a secondary command buffer all state (besides render pass state) becomes
476 // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
477 this->invalidateState();
egdaniel9a6cf802016-06-08 08:22:05 -0700478}
479
Greg Daniel48661b82018-01-22 16:11:35 -0500480static void submit_to_queue(const GrVkInterface* interface,
481 VkQueue queue,
482 VkFence fence,
483 uint32_t waitCount,
484 const VkSemaphore* waitSemaphores,
485 const VkPipelineStageFlags* waitStages,
486 uint32_t commandBufferCount,
487 const VkCommandBuffer* commandBuffers,
488 uint32_t signalCount,
489 const VkSemaphore* signalSemaphores) {
490 VkSubmitInfo submitInfo;
491 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
492 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
493 submitInfo.pNext = nullptr;
494 submitInfo.waitSemaphoreCount = waitCount;
495 submitInfo.pWaitSemaphores = waitSemaphores;
496 submitInfo.pWaitDstStageMask = waitStages;
497 submitInfo.commandBufferCount = commandBufferCount;
498 submitInfo.pCommandBuffers = commandBuffers;
499 submitInfo.signalSemaphoreCount = signalCount;
500 submitInfo.pSignalSemaphores = signalSemaphores;
501 GR_VK_CALL_ERRCHECK(interface, QueueSubmit(queue, 1, &submitInfo, fence));
502}
503
// Submits this command buffer to 'queue', waiting on / signaling the given semaphores and
// signaling fSubmitFence on completion. With kForce_SyncQueue the call blocks until the
// GPU finishes and the fence is destroyed.
// NOTE(review): the semaphore mutex / markAsSignaled ordering below is deliberate — do
// not reorder without re-reading the comment in the semaphore branch.
void GrVkPrimaryCommandBuffer::submitToQueue(
        const GrVkGpu* gpu,
        VkQueue queue,
        GrVkGpu::SyncQueue sync,
        SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
        SkTArray<GrVkSemaphore::Resource*>& waitSemaphores) {
    SkASSERT(!fIsActive);

    VkResult err;
    // Lazily create the submit fence on first use; afterwards just reset it.
    if (VK_NULL_HANDLE == fSubmitFence) {
        VkFenceCreateInfo fenceInfo;
        memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
        fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
                                                         &fSubmitFence));
        SkASSERT(!err);
    } else {
        GR_VK_CALL(gpu->vkInterface(), ResetFences(gpu->device(), 1, &fSubmitFence));
    }

    int signalCount = signalSemaphores.count();
    int waitCount = waitSemaphores.count();

    if (0 == signalCount && 0 == waitCount) {
        // This command buffer has no dependent semaphores so we can simply just submit it to the
        // queue with no worries.
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence, 0, nullptr, nullptr,
                        1, &fCmdBuffer, 0, nullptr);
    } else {
        // Hold the shared semaphore mutex for the whole filter-and-submit sequence so no
        // other submit can race the shouldSignal/shouldWait queries below.
        GrVkSemaphore::Resource::AcquireMutex();

        // Only include semaphores that still need to be signaled; track each so it lives
        // until this submission completes.
        SkTArray<VkSemaphore> vkSignalSems(signalCount);
        for (int i = 0; i < signalCount; ++i) {
            if (signalSemaphores[i]->shouldSignal()) {
                this->addResource(signalSemaphores[i]);
                vkSignalSems.push_back(signalSemaphores[i]->semaphore());
            }
        }

        // Likewise only wait on semaphores that still need waiting; all waits use the
        // ALL_COMMANDS stage.
        SkTArray<VkSemaphore> vkWaitSems(waitCount);
        SkTArray<VkPipelineStageFlags> vkWaitStages(waitCount);
        for (int i = 0; i < waitCount; ++i) {
            if (waitSemaphores[i]->shouldWait()) {
                this->addResource(waitSemaphores[i]);
                vkWaitSems.push_back(waitSemaphores[i]->semaphore());
                vkWaitStages.push_back(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
            }
        }
        submit_to_queue(gpu->vkInterface(), queue, fSubmitFence,
                        vkWaitSems.count(), vkWaitSems.begin(), vkWaitStages.begin(),
                        1, &fCmdBuffer,
                        vkSignalSems.count(), vkSignalSems.begin());
        // Since shouldSignal/Wait do not require a mutex to be held, we must make sure that we mark
        // the semaphores after we've submitted. Thus in the worst case another submit grabs the
        // mutex and then realizes it doesn't need to submit the semaphore. We will never end up
        // where a semaphore doesn't think it needs to be submitted (cause of querying
        // shouldSignal/Wait), but it should need to.
        for (int i = 0; i < signalCount; ++i) {
            signalSemaphores[i]->markAsSignaled();
        }
        for (int i = 0; i < waitCount; ++i) {
            waitSemaphores[i]->markAsWaited();
        }

        GrVkSemaphore::Resource::ReleaseMutex();
    }

    if (GrVkGpu::kForce_SyncQueue == sync) {
        // Synchronous path: block until the GPU signals the fence, then destroy it.
        err = GR_VK_CALL(gpu->vkInterface(),
                         WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
        if (VK_TIMEOUT == err) {
            SkDebugf("Fence failed to signal: %d\n", err);
            SK_ABORT("failing");
        }
        SkASSERT(!err);

        // Destroy the fence
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
        fSubmitFence = VK_NULL_HANDLE;
    }
}
585
586bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) const {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500587 SkASSERT(!fIsActive);
egdaniel9a6cf802016-06-08 08:22:05 -0700588 if (VK_NULL_HANDLE == fSubmitFence) {
589 return true;
590 }
591
592 VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
593 switch (err) {
594 case VK_SUCCESS:
595 return true;
596
597 case VK_NOT_READY:
598 return false;
599
600 default:
601 SkDebugf("Error getting fence status: %d\n", err);
Ben Wagnerb4aab9a2017-08-16 10:53:04 -0400602 SK_ABORT("failing");
egdaniel9a6cf802016-06-08 08:22:05 -0700603 break;
604 }
605
606 return false;
607}
608
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500609void GrVkPrimaryCommandBuffer::onReleaseResources(GrVkGpu* gpu) {
jvanverth7ec92412016-07-06 09:24:57 -0700610 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500611 fSecondaryCommandBuffers[i]->releaseResources(gpu);
612 }
613}
614
615void GrVkPrimaryCommandBuffer::recycleSecondaryCommandBuffers() {
616 for (int i = 0; i < fSecondaryCommandBuffers.count(); ++i) {
617 SkASSERT(fSecondaryCommandBuffers[i]->commandPool() == fCmdPool);
618 fCmdPool->recycleSecondaryCommandBuffer(fSecondaryCommandBuffers[i]);
jvanverth7ec92412016-07-06 09:24:57 -0700619 }
620 fSecondaryCommandBuffers.reset();
621}
622
egdaniel9a6cf802016-06-08 08:22:05 -0700623void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
624 GrVkImage* srcImage,
625 VkImageLayout srcLayout,
626 GrVkImage* dstImage,
627 VkImageLayout dstLayout,
628 uint32_t copyRegionCount,
629 const VkImageCopy* copyRegions) {
630 SkASSERT(fIsActive);
631 SkASSERT(!fActiveRenderPass);
632 this->addResource(srcImage->resource());
633 this->addResource(dstImage->resource());
634 GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
635 srcImage->image(),
636 srcLayout,
637 dstImage->image(),
638 dstLayout,
639 copyRegionCount,
640 copyRegions));
641}
642
643void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
644 const GrVkResource* srcResource,
645 VkImage srcImage,
646 VkImageLayout srcLayout,
647 const GrVkResource* dstResource,
648 VkImage dstImage,
649 VkImageLayout dstLayout,
650 uint32_t blitRegionCount,
651 const VkImageBlit* blitRegions,
652 VkFilter filter) {
653 SkASSERT(fIsActive);
654 SkASSERT(!fActiveRenderPass);
655 this->addResource(srcResource);
656 this->addResource(dstResource);
657 GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
658 srcImage,
659 srcLayout,
660 dstImage,
661 dstLayout,
662 blitRegionCount,
663 blitRegions,
664 filter));
665}
666
Greg Daniel6ecc9112017-06-16 16:17:03 +0000667void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
668 const GrVkImage& srcImage,
669 const GrVkImage& dstImage,
670 uint32_t blitRegionCount,
671 const VkImageBlit* blitRegions,
672 VkFilter filter) {
673 this->blitImage(gpu,
674 srcImage.resource(),
675 srcImage.image(),
676 srcImage.currentLayout(),
677 dstImage.resource(),
678 dstImage.image(),
679 dstImage.currentLayout(),
680 blitRegionCount,
681 blitRegions,
682 filter);
683}
684
685
egdaniel9a6cf802016-06-08 08:22:05 -0700686void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
687 GrVkImage* srcImage,
688 VkImageLayout srcLayout,
689 GrVkTransferBuffer* dstBuffer,
690 uint32_t copyRegionCount,
691 const VkBufferImageCopy* copyRegions) {
692 SkASSERT(fIsActive);
693 SkASSERT(!fActiveRenderPass);
694 this->addResource(srcImage->resource());
695 this->addResource(dstBuffer->resource());
696 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
697 srcImage->image(),
698 srcLayout,
699 dstBuffer->buffer(),
700 copyRegionCount,
701 copyRegions));
702}
703
704void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
705 GrVkTransferBuffer* srcBuffer,
706 GrVkImage* dstImage,
707 VkImageLayout dstLayout,
708 uint32_t copyRegionCount,
709 const VkBufferImageCopy* copyRegions) {
710 SkASSERT(fIsActive);
711 SkASSERT(!fActiveRenderPass);
712 this->addResource(srcBuffer->resource());
713 this->addResource(dstImage->resource());
714 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
715 srcBuffer->buffer(),
716 dstImage->image(),
717 dstLayout,
718 copyRegionCount,
719 copyRegions));
720}
721
Greg Daniel6888c0d2017-08-25 11:55:50 -0400722
723void GrVkPrimaryCommandBuffer::copyBuffer(GrVkGpu* gpu,
724 GrVkBuffer* srcBuffer,
725 GrVkBuffer* dstBuffer,
726 uint32_t regionCount,
727 const VkBufferCopy* regions) {
728 SkASSERT(fIsActive);
729 SkASSERT(!fActiveRenderPass);
730#ifdef SK_DEBUG
731 for (uint32_t i = 0; i < regionCount; ++i) {
732 const VkBufferCopy& region = regions[i];
733 SkASSERT(region.size > 0);
734 SkASSERT(region.srcOffset < srcBuffer->size());
735 SkASSERT(region.dstOffset < dstBuffer->size());
736 SkASSERT(region.srcOffset + region.size <= srcBuffer->size());
737 SkASSERT(region.dstOffset + region.size <= dstBuffer->size());
738 }
739#endif
740 this->addResource(srcBuffer->resource());
741 this->addResource(dstBuffer->resource());
742 GR_VK_CALL(gpu->vkInterface(), CmdCopyBuffer(fCmdBuffer,
743 srcBuffer->buffer(),
744 dstBuffer->buffer(),
745 regionCount,
746 regions));
747}
748
// Records an inline buffer update via vkCmdUpdateBuffer. The Vulkan spec
// requires dstOffset and dataSize to be multiples of 4 and dataSize to be at
// most 65536 bytes; the asserts below mirror those rules. Must be called
// while recording and outside of a render pass.
void GrVkPrimaryCommandBuffer::updateBuffer(GrVkGpu* gpu,
                                            GrVkBuffer* dstBuffer,
                                            VkDeviceSize dstOffset,
                                            VkDeviceSize dataSize,
                                            const void* data) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    SkASSERT(0 == (dstOffset & 0x03)); // four byte aligned
    // TODO: handle larger transfer sizes
    SkASSERT(dataSize <= 65536);
    SkASSERT(0 == (dataSize & 0x03)); // four byte aligned
    // Keep the destination buffer alive until the GPU finishes this command buffer.
    this->addResource(dstBuffer->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdUpdateBuffer(fCmdBuffer,
                                                   dstBuffer->buffer(),
                                                   dstOffset,
                                                   dataSize,
                                                   (const uint32_t*) data));
}
767
// Records a vkCmdClearColorImage filling the given subresource ranges of the
// image with a solid color. Uses the layout currently tracked on the GrVkImage,
// so the caller must have transitioned the image to a clear-compatible layout
// beforehand. Must be called while recording and outside of a render pass.
void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
                                               GrVkImage* image,
                                               const VkClearColorValue* color,
                                               uint32_t subRangeCount,
                                               const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    // Keep the image alive until the GPU finishes this command buffer.
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
                                                      image->image(),
                                                      image->currentLayout(),
                                                      color,
                                                      subRangeCount,
                                                      subRanges));
}
783
// Records a vkCmdClearDepthStencilImage over the given subresource ranges.
// NOTE(review): the parameter is named 'color' but holds a depth/stencil clear
// value; the name is kept to match the declaration in the header.
// Uses the layout currently tracked on the GrVkImage. Must be called while
// recording and outside of a render pass.
void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
                                                      GrVkImage* image,
                                                      const VkClearDepthStencilValue* color,
                                                      uint32_t subRangeCount,
                                                      const VkImageSubresourceRange* subRanges) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);
    // Keep the image alive until the GPU finishes this command buffer.
    this->addResource(image->resource());
    GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
                                                             image->image(),
                                                             image->currentLayout(),
                                                             color,
                                                             subRangeCount,
                                                             subRanges));
}
799
// Records a vkCmdResolveImage that resolves the multisampled srcImage into
// dstImage for the given regions. Uses each image's currently tracked layout,
// so callers must have performed any needed layout transitions first. Must be
// called while recording and outside of a render pass.
void GrVkPrimaryCommandBuffer::resolveImage(GrVkGpu* gpu,
                                            const GrVkImage& srcImage,
                                            const GrVkImage& dstImage,
                                            uint32_t regionCount,
                                            const VkImageResolve* regions) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);

    // Keep both images alive until the GPU finishes this command buffer.
    this->addResource(srcImage.resource());
    this->addResource(dstImage.resource());

    GR_VK_CALL(gpu->vkInterface(), CmdResolveImage(fCmdBuffer,
                                                   srcImage.image(),
                                                   srcImage.currentLayout(),
                                                   dstImage.image(),
                                                   dstImage.currentLayout(),
                                                   regionCount,
                                                   regions));
}
819
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500820void GrVkPrimaryCommandBuffer::onFreeGPUData(GrVkGpu* gpu) const {
egdaniel9cb63402016-06-23 08:37:05 -0700821 SkASSERT(!fActiveRenderPass);
822 // Destroy the fence, if any
823 if (VK_NULL_HANDLE != fSubmitFence) {
824 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
825 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500826 for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
827 buffer->unref(gpu);
828 }
829}
830
831void GrVkPrimaryCommandBuffer::onAbandonGPUData() const {
832 SkASSERT(!fActiveRenderPass);
833 for (GrVkSecondaryCommandBuffer* buffer : fSecondaryCommandBuffers) {
834 buffer->unrefAndAbandon();
835 }
egdaniel9cb63402016-06-23 08:37:05 -0700836}
837
egdaniel9a6cf802016-06-08 08:22:05 -0700838///////////////////////////////////////////////////////////////////////////////
839// SecondaryCommandBuffer
840////////////////////////////////////////////////////////////////////////////////
841
jvanverth7ec92412016-07-06 09:24:57 -0700842GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(const GrVkGpu* gpu,
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500843 GrVkCommandPool* cmdPool) {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500844 SkASSERT(cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700845 const VkCommandBufferAllocateInfo cmdInfo = {
846 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
Ben Wagnera93a14a2017-08-28 10:34:05 -0400847 nullptr, // pNext
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500848 cmdPool->vkCommandPool(), // commandPool
egdaniel9a6cf802016-06-08 08:22:05 -0700849 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
850 1 // bufferCount
851 };
852
853 VkCommandBuffer cmdBuffer;
854 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
855 &cmdInfo,
856 &cmdBuffer));
857 if (err) {
858 return nullptr;
859 }
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500860 return new GrVkSecondaryCommandBuffer(cmdBuffer, cmdPool);
egdaniel9a6cf802016-06-08 08:22:05 -0700861}
862
// Wraps an externally allocated VkCommandBuffer. No command pool is supplied
// (nullptr) — presumably that is what isWrapped() keys off of; verify against
// the constructor. Wrapped buffers skip Begin/EndCommandBuffer in
// begin()/end() since the external owner manages recording.
GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(VkCommandBuffer cmdBuffer) {
    return new GrVkSecondaryCommandBuffer(cmdBuffer, nullptr);
}
egdaniel9a6cf802016-06-08 08:22:05 -0700866
// Begins recording this secondary command buffer for execution inside
// compatibleRenderPass. The framebuffer may be null; supplying it is an
// optimization hint per the Vulkan inheritance-info rules. For wrapped
// (externally owned) buffers we only update our bookkeeping and do not call
// vkBeginCommandBuffer.
void GrVkSecondaryCommandBuffer::begin(const GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
                                       const GrVkRenderPass* compatibleRenderPass) {
    SkASSERT(!fIsActive);
    SkASSERT(compatibleRenderPass);
    fActiveRenderPass = compatibleRenderPass;

    if (!this->isWrapped()) {
        // Secondary buffers executed inside a render pass must declare the
        // render pass (and optionally framebuffer) they will be used with.
        VkCommandBufferInheritanceInfo inheritanceInfo;
        memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
        inheritanceInfo.pNext = nullptr;
        inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
        inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
        inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
        inheritanceInfo.occlusionQueryEnable = false;
        inheritanceInfo.queryFlags = 0;
        inheritanceInfo.pipelineStatistics = 0;

        // RENDER_PASS_CONTINUE: this buffer executes entirely within a render
        // pass; ONE_TIME_SUBMIT: it is re-recorded before each submission.
        VkCommandBufferBeginInfo cmdBufferBeginInfo;
        memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdBufferBeginInfo.pNext = nullptr;
        cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                   VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;

        GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
                                                                   &cmdBufferBeginInfo));
    }
    fIsActive = true;
}
898
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500899void GrVkSecondaryCommandBuffer::end(GrVkGpu* gpu) {
egdaniel9a6cf802016-06-08 08:22:05 -0700900 SkASSERT(fIsActive);
Greg Daniel070cbaf2019-01-03 17:35:54 -0500901 if (!this->isWrapped()) {
902 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
903 }
egdaniel9a6cf802016-06-08 08:22:05 -0700904 this->invalidateState();
905 fIsActive = false;
906}