blob: 4a469fe42ef0f0262529eabc34af83cf9ae9ed8b [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#include "GrVkCommandBuffer.h"
9
10#include "GrVkFramebuffer.h"
11#include "GrVkImageView.h"
egdaniel470d77a2016-03-18 12:50:27 -070012#include "GrVkPipeline.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050013#include "GrVkRenderPass.h"
14#include "GrVkRenderTarget.h"
egdaniel22281c12016-03-23 13:49:40 -070015#include "GrVkPipelineState.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050016#include "GrVkTransferBuffer.h"
17#include "GrVkUtil.h"
egdaniel9cb63402016-06-23 08:37:05 -070018#include "SkRect.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050019
20void GrVkCommandBuffer::invalidateState() {
egdaniel470d77a2016-03-18 12:50:27 -070021 fBoundVertexBuffer = VK_NULL_HANDLE;
Greg Daniel164a9f02016-02-22 09:56:40 -050022 fBoundVertexBufferIsValid = false;
egdaniel470d77a2016-03-18 12:50:27 -070023 fBoundIndexBuffer = VK_NULL_HANDLE;
Greg Daniel164a9f02016-02-22 09:56:40 -050024 fBoundIndexBufferIsValid = false;
egdaniel470d77a2016-03-18 12:50:27 -070025
26 memset(&fCachedViewport, 0, sizeof(VkViewport));
27 fCachedViewport.width = - 1.0f; // Viewport must have a width greater than 0
28
29 memset(&fCachedScissor, 0, sizeof(VkRect2D));
30 fCachedScissor.offset.x = -1; // Scissor offset must be greater that 0 to be valid
31
32 for (int i = 0; i < 4; ++i) {
33 fCachedBlendConstant[i] = -1.0;
34 }
Greg Daniel164a9f02016-02-22 09:56:40 -050035}
36
37void GrVkCommandBuffer::freeGPUData(const GrVkGpu* gpu) const {
38 SkASSERT(!fIsActive);
Greg Daniel164a9f02016-02-22 09:56:40 -050039 for (int i = 0; i < fTrackedResources.count(); ++i) {
40 fTrackedResources[i]->unref(gpu);
41 }
halcanary9d524f22016-03-29 09:03:52 -070042
Greg Daniel164a9f02016-02-22 09:56:40 -050043 GR_VK_CALL(gpu->vkInterface(), FreeCommandBuffers(gpu->device(), gpu->cmdPool(),
44 1, &fCmdBuffer));
egdaniel9cb63402016-06-23 08:37:05 -070045
46 this->onFreeGPUData(gpu);
Greg Daniel164a9f02016-02-22 09:56:40 -050047}
48
49void GrVkCommandBuffer::abandonSubResources() const {
50 for (int i = 0; i < fTrackedResources.count(); ++i) {
51 fTrackedResources[i]->unrefAndAbandon();
52 }
53}
54
Greg Daniel164a9f02016-02-22 09:56:40 -050055////////////////////////////////////////////////////////////////////////////////
56// CommandBuffer commands
57////////////////////////////////////////////////////////////////////////////////
58
59void GrVkCommandBuffer::pipelineBarrier(const GrVkGpu* gpu,
60 VkPipelineStageFlags srcStageMask,
61 VkPipelineStageFlags dstStageMask,
62 bool byRegion,
63 BarrierType barrierType,
64 void* barrier) const {
65 SkASSERT(fIsActive);
egdaniel58a8d922016-04-21 08:03:10 -070066 // For images we can have barriers inside of render passes but they require us to add more
67 // support in subpasses which need self dependencies to have barriers inside them. Also, we can
68 // never have buffer barriers inside of a render pass. For now we will just assert that we are
69 // not in a render pass.
70 SkASSERT(!fActiveRenderPass);
Greg Daniel164a9f02016-02-22 09:56:40 -050071 VkDependencyFlags dependencyFlags = byRegion ? VK_DEPENDENCY_BY_REGION_BIT : 0;
72
73 switch (barrierType) {
74 case kMemory_BarrierType: {
75 const VkMemoryBarrier* barrierPtr = reinterpret_cast<VkMemoryBarrier*>(barrier);
76 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
77 dstStageMask, dependencyFlags,
78 1, barrierPtr,
79 0, nullptr,
80 0, nullptr));
81 break;
82 }
83
84 case kBufferMemory_BarrierType: {
85 const VkBufferMemoryBarrier* barrierPtr =
86 reinterpret_cast<VkBufferMemoryBarrier*>(barrier);
87 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
88 dstStageMask, dependencyFlags,
89 0, nullptr,
90 1, barrierPtr,
91 0, nullptr));
92 break;
93 }
94
95 case kImageMemory_BarrierType: {
96 const VkImageMemoryBarrier* barrierPtr =
97 reinterpret_cast<VkImageMemoryBarrier*>(barrier);
98 GR_VK_CALL(gpu->vkInterface(), CmdPipelineBarrier(fCmdBuffer, srcStageMask,
99 dstStageMask, dependencyFlags,
100 0, nullptr,
101 0, nullptr,
102 1, barrierPtr));
103 break;
104 }
105 }
106
107}
108
Greg Daniel164a9f02016-02-22 09:56:40 -0500109void GrVkCommandBuffer::clearAttachments(const GrVkGpu* gpu,
110 int numAttachments,
111 const VkClearAttachment* attachments,
112 int numRects,
113 const VkClearRect* clearRects) const {
114 SkASSERT(fIsActive);
115 SkASSERT(fActiveRenderPass);
116 SkASSERT(numAttachments > 0);
117 SkASSERT(numRects > 0);
118#ifdef SK_DEBUG
119 for (int i = 0; i < numAttachments; ++i) {
120 if (attachments[i].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
121 uint32_t testIndex;
122 SkAssertResult(fActiveRenderPass->colorAttachmentIndex(&testIndex));
123 SkASSERT(testIndex == attachments[i].colorAttachment);
124 }
125 }
126#endif
127 GR_VK_CALL(gpu->vkInterface(), CmdClearAttachments(fCmdBuffer,
128 numAttachments,
129 attachments,
130 numRects,
131 clearRects));
132}
133
134void GrVkCommandBuffer::bindDescriptorSets(const GrVkGpu* gpu,
egdaniel22281c12016-03-23 13:49:40 -0700135 GrVkPipelineState* pipelineState,
Greg Daniel164a9f02016-02-22 09:56:40 -0500136 VkPipelineLayout layout,
137 uint32_t firstSet,
138 uint32_t setCount,
139 const VkDescriptorSet* descriptorSets,
140 uint32_t dynamicOffsetCount,
141 const uint32_t* dynamicOffsets) {
142 SkASSERT(fIsActive);
143 GR_VK_CALL(gpu->vkInterface(), CmdBindDescriptorSets(fCmdBuffer,
144 VK_PIPELINE_BIND_POINT_GRAPHICS,
145 layout,
146 firstSet,
147 setCount,
148 descriptorSets,
149 dynamicOffsetCount,
150 dynamicOffsets));
egdaniel22281c12016-03-23 13:49:40 -0700151 pipelineState->addUniformResources(*this);
Greg Daniel164a9f02016-02-22 09:56:40 -0500152}
153
egdaniel470d77a2016-03-18 12:50:27 -0700154void GrVkCommandBuffer::bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline) {
155 SkASSERT(fIsActive);
egdaniel470d77a2016-03-18 12:50:27 -0700156 GR_VK_CALL(gpu->vkInterface(), CmdBindPipeline(fCmdBuffer,
157 VK_PIPELINE_BIND_POINT_GRAPHICS,
158 pipeline->pipeline()));
159 addResource(pipeline);
160}
161
Greg Daniel164a9f02016-02-22 09:56:40 -0500162void GrVkCommandBuffer::drawIndexed(const GrVkGpu* gpu,
163 uint32_t indexCount,
164 uint32_t instanceCount,
165 uint32_t firstIndex,
166 int32_t vertexOffset,
167 uint32_t firstInstance) const {
168 SkASSERT(fIsActive);
169 SkASSERT(fActiveRenderPass);
170 GR_VK_CALL(gpu->vkInterface(), CmdDrawIndexed(fCmdBuffer,
171 indexCount,
172 instanceCount,
173 firstIndex,
174 vertexOffset,
175 firstInstance));
176}
177
178void GrVkCommandBuffer::draw(const GrVkGpu* gpu,
179 uint32_t vertexCount,
180 uint32_t instanceCount,
181 uint32_t firstVertex,
182 uint32_t firstInstance) const {
183 SkASSERT(fIsActive);
184 SkASSERT(fActiveRenderPass);
185 GR_VK_CALL(gpu->vkInterface(), CmdDraw(fCmdBuffer,
186 vertexCount,
187 instanceCount,
188 firstVertex,
189 firstInstance));
190}
egdaniel470d77a2016-03-18 12:50:27 -0700191
192void GrVkCommandBuffer::setViewport(const GrVkGpu* gpu,
193 uint32_t firstViewport,
194 uint32_t viewportCount,
195 const VkViewport* viewports) {
196 SkASSERT(fIsActive);
197 SkASSERT(1 == viewportCount);
198 if (memcmp(viewports, &fCachedViewport, sizeof(VkViewport))) {
199 GR_VK_CALL(gpu->vkInterface(), CmdSetViewport(fCmdBuffer,
200 firstViewport,
201 viewportCount,
202 viewports));
203 fCachedViewport = viewports[0];
204 }
205}
206
207void GrVkCommandBuffer::setScissor(const GrVkGpu* gpu,
208 uint32_t firstScissor,
209 uint32_t scissorCount,
210 const VkRect2D* scissors) {
211 SkASSERT(fIsActive);
212 SkASSERT(1 == scissorCount);
213 if (memcmp(scissors, &fCachedScissor, sizeof(VkRect2D))) {
214 GR_VK_CALL(gpu->vkInterface(), CmdSetScissor(fCmdBuffer,
215 firstScissor,
216 scissorCount,
217 scissors));
218 fCachedScissor = scissors[0];
219 }
220}
221
222void GrVkCommandBuffer::setBlendConstants(const GrVkGpu* gpu,
223 const float blendConstants[4]) {
224 SkASSERT(fIsActive);
225 if (memcmp(blendConstants, fCachedBlendConstant, 4 * sizeof(float))) {
226 GR_VK_CALL(gpu->vkInterface(), CmdSetBlendConstants(fCmdBuffer, blendConstants));
227 memcpy(fCachedBlendConstant, blendConstants, 4 * sizeof(float));
228 }
229}
egdaniel9a6cf802016-06-08 08:22:05 -0700230
231///////////////////////////////////////////////////////////////////////////////
232// PrimaryCommandBuffer
233////////////////////////////////////////////////////////////////////////////////
// Destructor: the buffer must not be torn down mid-render-pass; callers are
// expected to have called endRenderPass() first.
GrVkPrimaryCommandBuffer::~GrVkPrimaryCommandBuffer() {
    // Should have ended any render pass we're in the middle of
    SkASSERT(!fActiveRenderPass);
}
238
egdaniel9a6cf802016-06-08 08:22:05 -0700239GrVkPrimaryCommandBuffer* GrVkPrimaryCommandBuffer::Create(const GrVkGpu* gpu,
240 VkCommandPool cmdPool) {
241 const VkCommandBufferAllocateInfo cmdInfo = {
242 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
243 NULL, // pNext
244 cmdPool, // commandPool
245 VK_COMMAND_BUFFER_LEVEL_PRIMARY, // level
246 1 // bufferCount
247 };
248
249 VkCommandBuffer cmdBuffer;
250 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
251 &cmdInfo,
252 &cmdBuffer));
253 if (err) {
254 return nullptr;
255 }
256 return new GrVkPrimaryCommandBuffer(cmdBuffer);
257}
258
259void GrVkPrimaryCommandBuffer::begin(const GrVkGpu* gpu) {
260 SkASSERT(!fIsActive);
261 VkCommandBufferBeginInfo cmdBufferBeginInfo;
262 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
263 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
264 cmdBufferBeginInfo.pNext = nullptr;
265 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
266 cmdBufferBeginInfo.pInheritanceInfo = nullptr;
267
268 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
269 &cmdBufferBeginInfo));
270 fIsActive = true;
271}
272
// Finishes recording: closes the VkCommandBuffer and resets the cached
// binding/dynamic state, which does not carry over to the next recording.
void GrVkPrimaryCommandBuffer::end(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(!fActiveRenderPass);  // render pass must have been ended already
    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
}
280
281void GrVkPrimaryCommandBuffer::beginRenderPass(const GrVkGpu* gpu,
egdaniel9cb63402016-06-23 08:37:05 -0700282 const GrVkRenderPass* renderPass,
283 uint32_t clearCount,
284 const VkClearValue* clearValues,
285 const GrVkRenderTarget& target,
286 const SkIRect& bounds,
287 bool forSecondaryCB) {
egdaniel9a6cf802016-06-08 08:22:05 -0700288 SkASSERT(fIsActive);
289 SkASSERT(!fActiveRenderPass);
egdaniel9cb63402016-06-23 08:37:05 -0700290 SkASSERT(renderPass->isCompatible(target));
291
egdaniel9a6cf802016-06-08 08:22:05 -0700292 VkRenderPassBeginInfo beginInfo;
egdaniel9cb63402016-06-23 08:37:05 -0700293 VkRect2D renderArea;
294 renderArea.offset = { bounds.fLeft , bounds.fTop };
295 renderArea.extent = { (uint32_t)bounds.width(), (uint32_t)bounds.height() };
296
297 memset(&beginInfo, 0, sizeof(VkRenderPassBeginInfo));
298 beginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
299 beginInfo.pNext = nullptr;
300 beginInfo.renderPass = renderPass->vkRenderPass();
301 beginInfo.framebuffer = target.framebuffer()->framebuffer();
302 beginInfo.renderArea = renderArea;
303 beginInfo.clearValueCount = clearCount;
304 beginInfo.pClearValues = clearValues;
305
306 VkSubpassContents contents = forSecondaryCB ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
307 : VK_SUBPASS_CONTENTS_INLINE;
308
egdaniel9a6cf802016-06-08 08:22:05 -0700309 GR_VK_CALL(gpu->vkInterface(), CmdBeginRenderPass(fCmdBuffer, &beginInfo, contents));
310 fActiveRenderPass = renderPass;
311 this->addResource(renderPass);
312 target.addResources(*this);
313}
314
// Ends the render pass started by beginRenderPass() and clears the active
// render pass pointer so barriers/copies become legal again.
void GrVkPrimaryCommandBuffer::endRenderPass(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    GR_VK_CALL(gpu->vkInterface(), CmdEndRenderPass(fCmdBuffer));
    fActiveRenderPass = nullptr;
}
321
// Executes a recorded secondary command buffer inside our active render pass.
// The secondary buffer must have been recorded against a compatible render
// pass, and is tracked so it outlives this submission.
void GrVkPrimaryCommandBuffer::executeCommands(const GrVkGpu* gpu,
                                               const GrVkSecondaryCommandBuffer* buffer) {
    SkASSERT(fIsActive);
    SkASSERT(fActiveRenderPass);
    SkASSERT(fActiveRenderPass->isCompatible(*buffer->fActiveRenderPass));

    GR_VK_CALL(gpu->vkInterface(), CmdExecuteCommands(fCmdBuffer, 1, &buffer->fCmdBuffer));
    this->addResource(buffer);
    // When executing a secondary command buffer all state (besides render pass state) becomes
    // invalidated and must be reset. This includes bound buffers, pipelines, dynamic state, etc.
    this->invalidateState();
}
334
335void GrVkPrimaryCommandBuffer::submitToQueue(const GrVkGpu* gpu,
336 VkQueue queue,
337 GrVkGpu::SyncQueue sync) {
338 SkASSERT(!fIsActive);
339
340 VkResult err;
341 VkFenceCreateInfo fenceInfo;
342 memset(&fenceInfo, 0, sizeof(VkFenceCreateInfo));
343 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
344 err = GR_VK_CALL(gpu->vkInterface(), CreateFence(gpu->device(), &fenceInfo, nullptr,
345 &fSubmitFence));
346 SkASSERT(!err);
347
348 VkSubmitInfo submitInfo;
349 memset(&submitInfo, 0, sizeof(VkSubmitInfo));
350 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
351 submitInfo.pNext = nullptr;
352 submitInfo.waitSemaphoreCount = 0;
353 submitInfo.pWaitSemaphores = nullptr;
354 submitInfo.pWaitDstStageMask = 0;
355 submitInfo.commandBufferCount = 1;
356 submitInfo.pCommandBuffers = &fCmdBuffer;
357 submitInfo.signalSemaphoreCount = 0;
358 submitInfo.pSignalSemaphores = nullptr;
359 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), QueueSubmit(queue, 1, &submitInfo, fSubmitFence));
360
361 if (GrVkGpu::kForce_SyncQueue == sync) {
362 err = GR_VK_CALL(gpu->vkInterface(),
363 WaitForFences(gpu->device(), 1, &fSubmitFence, true, UINT64_MAX));
364 if (VK_TIMEOUT == err) {
365 SkDebugf("Fence failed to signal: %d\n", err);
366 SkFAIL("failing");
367 }
368 SkASSERT(!err);
369
370 // Destroy the fence
371 GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
372 fSubmitFence = VK_NULL_HANDLE;
373 }
374}
375
376bool GrVkPrimaryCommandBuffer::finished(const GrVkGpu* gpu) const {
377 if (VK_NULL_HANDLE == fSubmitFence) {
378 return true;
379 }
380
381 VkResult err = GR_VK_CALL(gpu->vkInterface(), GetFenceStatus(gpu->device(), fSubmitFence));
382 switch (err) {
383 case VK_SUCCESS:
384 return true;
385
386 case VK_NOT_READY:
387 return false;
388
389 default:
390 SkDebugf("Error getting fence status: %d\n", err);
391 SkFAIL("failing");
392 break;
393 }
394
395 return false;
396}
397
398void GrVkPrimaryCommandBuffer::copyImage(const GrVkGpu* gpu,
399 GrVkImage* srcImage,
400 VkImageLayout srcLayout,
401 GrVkImage* dstImage,
402 VkImageLayout dstLayout,
403 uint32_t copyRegionCount,
404 const VkImageCopy* copyRegions) {
405 SkASSERT(fIsActive);
406 SkASSERT(!fActiveRenderPass);
407 this->addResource(srcImage->resource());
408 this->addResource(dstImage->resource());
409 GR_VK_CALL(gpu->vkInterface(), CmdCopyImage(fCmdBuffer,
410 srcImage->image(),
411 srcLayout,
412 dstImage->image(),
413 dstLayout,
414 copyRegionCount,
415 copyRegions));
416}
417
418void GrVkPrimaryCommandBuffer::blitImage(const GrVkGpu* gpu,
419 const GrVkResource* srcResource,
420 VkImage srcImage,
421 VkImageLayout srcLayout,
422 const GrVkResource* dstResource,
423 VkImage dstImage,
424 VkImageLayout dstLayout,
425 uint32_t blitRegionCount,
426 const VkImageBlit* blitRegions,
427 VkFilter filter) {
428 SkASSERT(fIsActive);
429 SkASSERT(!fActiveRenderPass);
430 this->addResource(srcResource);
431 this->addResource(dstResource);
432 GR_VK_CALL(gpu->vkInterface(), CmdBlitImage(fCmdBuffer,
433 srcImage,
434 srcLayout,
435 dstImage,
436 dstLayout,
437 blitRegionCount,
438 blitRegions,
439 filter));
440}
441
442void GrVkPrimaryCommandBuffer::copyImageToBuffer(const GrVkGpu* gpu,
443 GrVkImage* srcImage,
444 VkImageLayout srcLayout,
445 GrVkTransferBuffer* dstBuffer,
446 uint32_t copyRegionCount,
447 const VkBufferImageCopy* copyRegions) {
448 SkASSERT(fIsActive);
449 SkASSERT(!fActiveRenderPass);
450 this->addResource(srcImage->resource());
451 this->addResource(dstBuffer->resource());
452 GR_VK_CALL(gpu->vkInterface(), CmdCopyImageToBuffer(fCmdBuffer,
453 srcImage->image(),
454 srcLayout,
455 dstBuffer->buffer(),
456 copyRegionCount,
457 copyRegions));
458}
459
460void GrVkPrimaryCommandBuffer::copyBufferToImage(const GrVkGpu* gpu,
461 GrVkTransferBuffer* srcBuffer,
462 GrVkImage* dstImage,
463 VkImageLayout dstLayout,
464 uint32_t copyRegionCount,
465 const VkBufferImageCopy* copyRegions) {
466 SkASSERT(fIsActive);
467 SkASSERT(!fActiveRenderPass);
468 this->addResource(srcBuffer->resource());
469 this->addResource(dstImage->resource());
470 GR_VK_CALL(gpu->vkInterface(), CmdCopyBufferToImage(fCmdBuffer,
471 srcBuffer->buffer(),
472 dstImage->image(),
473 dstLayout,
474 copyRegionCount,
475 copyRegions));
476}
477
478void GrVkPrimaryCommandBuffer::clearColorImage(const GrVkGpu* gpu,
479 GrVkImage* image,
480 const VkClearColorValue* color,
481 uint32_t subRangeCount,
482 const VkImageSubresourceRange* subRanges) {
483 SkASSERT(fIsActive);
484 SkASSERT(!fActiveRenderPass);
485 this->addResource(image->resource());
486 GR_VK_CALL(gpu->vkInterface(), CmdClearColorImage(fCmdBuffer,
487 image->image(),
488 image->currentLayout(),
489 color,
490 subRangeCount,
491 subRanges));
492}
493
494void GrVkPrimaryCommandBuffer::clearDepthStencilImage(const GrVkGpu* gpu,
495 GrVkImage* image,
496 const VkClearDepthStencilValue* color,
497 uint32_t subRangeCount,
498 const VkImageSubresourceRange* subRanges) {
499 SkASSERT(fIsActive);
500 SkASSERT(!fActiveRenderPass);
501 this->addResource(image->resource());
502 GR_VK_CALL(gpu->vkInterface(), CmdClearDepthStencilImage(fCmdBuffer,
503 image->image(),
504 image->currentLayout(),
505 color,
506 subRangeCount,
507 subRanges));
508}
509
// Called from the base class's freeGPUData(): destroys the submit fence if one
// is still outstanding.
void GrVkPrimaryCommandBuffer::onFreeGPUData(const GrVkGpu* gpu) const {
    SkASSERT(!fActiveRenderPass);
    // Destroy the fence, if any
    if (VK_NULL_HANDLE != fSubmitFence) {
        GR_VK_CALL(gpu->vkInterface(), DestroyFence(gpu->device(), fSubmitFence, nullptr));
    }
}
517
egdaniel9a6cf802016-06-08 08:22:05 -0700518///////////////////////////////////////////////////////////////////////////////
519// SecondaryCommandBuffer
520////////////////////////////////////////////////////////////////////////////////
521
522GrVkSecondaryCommandBuffer* GrVkSecondaryCommandBuffer::Create(
523 const GrVkGpu* gpu,
524 VkCommandPool cmdPool,
525 const GrVkRenderPass* compatibleRenderPass) {
526 const VkCommandBufferAllocateInfo cmdInfo = {
527 VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // sType
528 NULL, // pNext
529 cmdPool, // commandPool
530 VK_COMMAND_BUFFER_LEVEL_SECONDARY, // level
531 1 // bufferCount
532 };
533
534 VkCommandBuffer cmdBuffer;
535 VkResult err = GR_VK_CALL(gpu->vkInterface(), AllocateCommandBuffers(gpu->device(),
536 &cmdInfo,
537 &cmdBuffer));
538 if (err) {
539 return nullptr;
540 }
541 return new GrVkSecondaryCommandBuffer(cmdBuffer, compatibleRenderPass);
542}
543
544
545void GrVkSecondaryCommandBuffer::begin(const GrVkGpu* gpu, const GrVkFramebuffer* framebuffer) {
546 SkASSERT(!fIsActive);
547 SkASSERT(fActiveRenderPass);
548
549 VkCommandBufferInheritanceInfo inheritanceInfo;
550 memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
551 inheritanceInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
552 inheritanceInfo.pNext = nullptr;
553 inheritanceInfo.renderPass = fActiveRenderPass->vkRenderPass();
554 inheritanceInfo.subpass = 0; // Currently only using 1 subpass for each render pass
555 inheritanceInfo.framebuffer = framebuffer ? framebuffer->framebuffer() : VK_NULL_HANDLE;
556 inheritanceInfo.occlusionQueryEnable = false;
557 inheritanceInfo.queryFlags = 0;
558 inheritanceInfo.pipelineStatistics = 0;
559
560 VkCommandBufferBeginInfo cmdBufferBeginInfo;
561 memset(&cmdBufferBeginInfo, 0, sizeof(VkCommandBufferBeginInfo));
562 cmdBufferBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
563 cmdBufferBeginInfo.pNext = nullptr;
564 cmdBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
565 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
566 cmdBufferBeginInfo.pInheritanceInfo = &inheritanceInfo;
567
568 GR_VK_CALL_ERRCHECK(gpu->vkInterface(), BeginCommandBuffer(fCmdBuffer,
569 &cmdBufferBeginInfo));
570 fIsActive = true;
571}
572
// Finishes recording the secondary buffer and resets cached binding/dynamic
// state, which does not persist across recordings.
void GrVkSecondaryCommandBuffer::end(const GrVkGpu* gpu) {
    SkASSERT(fIsActive);
    GR_VK_CALL_ERRCHECK(gpu->vkInterface(), EndCommandBuffer(fCmdBuffer));
    this->invalidateState();
    fIsActive = false;
}
579