blob: 1ebae0b0616aca3ed4afefeedc2c8a1b504d5f92 [file] [log] [blame]
Greg Daniel164a9f02016-02-22 09:56:40 -05001/*
2* Copyright 2015 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#ifndef GrVkCommandBuffer_DEFINED
9#define GrVkCommandBuffer_DEFINED
10
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "include/gpu/vk/GrVkTypes.h"
Jim Van Verth3e192162020-03-10 16:23:16 -040012#include "src/gpu/GrManagedResource.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/vk/GrVkGpu.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/vk/GrVkSemaphore.h"
15#include "src/gpu/vk/GrVkUtil.h"
Greg Daniel164a9f02016-02-22 09:56:40 -050016
Greg Daniel6ecc9112017-06-16 16:17:03 +000017class GrVkBuffer;
egdaniel9a6cf802016-06-08 08:22:05 -070018class GrVkFramebuffer;
Greg Daniel6ecc9112017-06-16 16:17:03 +000019class GrVkImage;
Chris Dalton10ee0b22020-04-02 16:28:52 -060020class GrVkMeshBuffer;
egdaniel470d77a2016-03-18 12:50:27 -070021class GrVkPipeline;
Greg Daniel6ecc9112017-06-16 16:17:03 +000022class GrVkPipelineState;
Greg Daniel164a9f02016-02-22 09:56:40 -050023class GrVkRenderPass;
24class GrVkRenderTarget;
25class GrVkTransferBuffer;
26
Greg Daniel8daf3b72019-07-30 09:57:26 -040027class GrVkCommandBuffer {
Greg Daniel164a9f02016-02-22 09:56:40 -050028public:
Greg Daniel8daf3b72019-07-30 09:57:26 -040029 virtual ~GrVkCommandBuffer() {}
30
Greg Daniel164a9f02016-02-22 09:56:40 -050031 void invalidateState();
32
Greg Daniel164a9f02016-02-22 09:56:40 -050033 ////////////////////////////////////////////////////////////////////////////
Greg Daniel164a9f02016-02-22 09:56:40 -050034 // CommandBuffer commands
35 ////////////////////////////////////////////////////////////////////////////
36 enum BarrierType {
Greg Daniel164a9f02016-02-22 09:56:40 -050037 kBufferMemory_BarrierType,
38 kImageMemory_BarrierType
39 };
40
41 void pipelineBarrier(const GrVkGpu* gpu,
Jim Van Verth3e192162020-03-10 16:23:16 -040042 const GrManagedResource* resource,
Greg Daniel164a9f02016-02-22 09:56:40 -050043 VkPipelineStageFlags srcStageMask,
44 VkPipelineStageFlags dstStageMask,
45 bool byRegion,
46 BarrierType barrierType,
Greg Daniel59dc1482019-02-22 10:46:38 -050047 void* barrier);
Greg Daniel164a9f02016-02-22 09:56:40 -050048
Chris Dalton10ee0b22020-04-02 16:28:52 -060049 void bindInputBuffer(GrVkGpu* gpu, uint32_t binding, const GrVkMeshBuffer* vbuffer);
Chris Dalton1d616352017-05-31 12:51:23 -060050
Chris Dalton10ee0b22020-04-02 16:28:52 -060051 void bindIndexBuffer(GrVkGpu* gpu, const GrVkMeshBuffer* ibuffer);
Greg Daniel164a9f02016-02-22 09:56:40 -050052
egdaniel58a8d922016-04-21 08:03:10 -070053 void bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline);
54
Greg Daniel164a9f02016-02-22 09:56:40 -050055 void bindDescriptorSets(const GrVkGpu* gpu,
egdaniel22281c12016-03-23 13:49:40 -070056 GrVkPipelineState*,
Greg Danieleecc6872019-07-29 13:21:37 -040057 VkPipelineLayout layout,
egdanielbc9b2962016-09-27 08:00:53 -070058 uint32_t firstSet,
59 uint32_t setCount,
60 const VkDescriptorSet* descriptorSets,
61 uint32_t dynamicOffsetCount,
62 const uint32_t* dynamicOffsets);
63
egdaniel470d77a2016-03-18 12:50:27 -070064 void setViewport(const GrVkGpu* gpu,
65 uint32_t firstViewport,
66 uint32_t viewportCount,
67 const VkViewport* viewports);
68
69 void setScissor(const GrVkGpu* gpu,
70 uint32_t firstScissor,
71 uint32_t scissorCount,
72 const VkRect2D* scissors);
73
74 void setBlendConstants(const GrVkGpu* gpu, const float blendConstants[4]);
75
egdaniel9a6cf802016-06-08 08:22:05 -070076 // Commands that only work inside of a render pass
77 void clearAttachments(const GrVkGpu* gpu,
78 int numAttachments,
79 const VkClearAttachment* attachments,
80 int numRects,
Greg Danielf346df32019-04-03 14:52:13 -040081 const VkClearRect* clearRects);
egdaniel9a6cf802016-06-08 08:22:05 -070082
83 void drawIndexed(const GrVkGpu* gpu,
84 uint32_t indexCount,
85 uint32_t instanceCount,
86 uint32_t firstIndex,
87 int32_t vertexOffset,
Greg Danielf346df32019-04-03 14:52:13 -040088 uint32_t firstInstance);
egdaniel9a6cf802016-06-08 08:22:05 -070089
90 void draw(const GrVkGpu* gpu,
91 uint32_t vertexCount,
92 uint32_t instanceCount,
93 uint32_t firstVertex,
Greg Danielf346df32019-04-03 14:52:13 -040094 uint32_t firstInstance);
egdaniel9a6cf802016-06-08 08:22:05 -070095
Chris Dalton03fdf6a2020-04-07 12:31:59 -060096 void drawIndirect(const GrVkGpu* gpu,
97 const GrVkMeshBuffer* indirectBuffer,
98 VkDeviceSize offset,
99 uint32_t drawCount,
100 uint32_t stride);
101
102 void drawIndexedIndirect(const GrVkGpu* gpu,
103 const GrVkMeshBuffer* indirectBuffer,
104 VkDeviceSize offset,
105 uint32_t drawCount,
106 uint32_t stride);
107
Greg Daniel7d918fd2018-06-19 15:22:01 -0400108 // Add ref-counted resource that will be tracked and released when this command buffer finishes
109 // execution
Jim Van Verth3e192162020-03-10 16:23:16 -0400110 void addResource(const GrManagedResource* resource) {
Greg Danielfa3adf72019-11-07 09:53:41 -0500111 SkASSERT(resource);
egdaniel9a6cf802016-06-08 08:22:05 -0700112 resource->ref();
Jim Van Verth3e192162020-03-10 16:23:16 -0400113 resource->notifyQueuedForWorkOnGpu();
egdaniel594739c2016-09-20 12:39:25 -0700114 fTrackedResources.append(1, &resource);
egdaniel9a6cf802016-06-08 08:22:05 -0700115 }
116
egdanielc1be9bc2016-07-20 08:33:00 -0700117 // Add ref-counted resource that will be tracked and released when this command buffer finishes
118 // execution. When it is released, it will signal that the resource can be recycled for reuse.
Jim Van Verth3e192162020-03-10 16:23:16 -0400119 void addRecycledResource(const GrRecycledResource* resource) {
egdanielc1be9bc2016-07-20 08:33:00 -0700120 resource->ref();
Jim Van Verth3e192162020-03-10 16:23:16 -0400121 resource->notifyQueuedForWorkOnGpu();
egdaniel594739c2016-09-20 12:39:25 -0700122 fTrackedRecycledResources.append(1, &resource);
egdanielc1be9bc2016-07-20 08:33:00 -0700123 }
124
Jim Van Verth5082df12020-03-11 16:14:51 -0400125 void releaseResources();
jvanverth7ec92412016-07-06 09:24:57 -0700126
Jim Van Verth5082df12020-03-11 16:14:51 -0400127 void freeGPUData(const GrGpu* gpu, VkCommandPool pool) const;
Greg Daniel8daf3b72019-07-30 09:57:26 -0400128
Robert Phillipsce0a2bf2019-04-02 13:37:34 -0400129 bool hasWork() const { return fHasWork; }
130
egdaniel9a6cf802016-06-08 08:22:05 -0700131protected:
Greg Daniel0addbdf2019-11-25 15:03:58 -0500132 GrVkCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped = false)
133 : fCmdBuffer(cmdBuffer)
134 , fIsWrapped(isWrapped) {
Greg Daniel8daf3b72019-07-30 09:57:26 -0400135 fTrackedResources.setReserve(kInitialTrackedResourcesCount);
136 fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
137 this->invalidateState();
138 }
egdaniel594739c2016-09-20 12:39:25 -0700139
Greg Daniel0addbdf2019-11-25 15:03:58 -0500140 bool isWrapped() const { return fIsWrapped; }
Greg Daniel070cbaf2019-01-03 17:35:54 -0500141
Greg Daniel8daf3b72019-07-30 09:57:26 -0400142 void addingWork(const GrVkGpu* gpu);
Greg Danielee54f232019-04-03 14:58:40 -0400143
Greg Daniel8daf3b72019-07-30 09:57:26 -0400144 void submitPipelineBarriers(const GrVkGpu* gpu);
Robert Phillipsce0a2bf2019-04-02 13:37:34 -0400145
Jim Van Verth3e192162020-03-10 16:23:16 -0400146 SkTDArray<const GrManagedResource*> fTrackedResources;
147 SkTDArray<const GrRecycledResource*> fTrackedRecycledResources;
egdaniel9a6cf802016-06-08 08:22:05 -0700148
Greg Daniel8daf3b72019-07-30 09:57:26 -0400149 // Tracks whether we are in the middle of a command buffer begin/end calls and thus can add
150 // new commands to the buffer;
Greg Daniel0addbdf2019-11-25 15:03:58 -0500151 bool fIsActive = false;
Greg Daniel8daf3b72019-07-30 09:57:26 -0400152 bool fHasWork = false;
egdaniel9a6cf802016-06-08 08:22:05 -0700153
Greg Daniel8daf3b72019-07-30 09:57:26 -0400154 // Stores a pointer to the current active render pass (i.e. begin has been called but not
155 // end). A nullptr means there is no active render pass. The GrVKCommandBuffer does not own
156 // the render pass.
Greg Daniel0addbdf2019-11-25 15:03:58 -0500157 const GrVkRenderPass* fActiveRenderPass = nullptr;
egdaniel9a6cf802016-06-08 08:22:05 -0700158
Greg Daniel8daf3b72019-07-30 09:57:26 -0400159 VkCommandBuffer fCmdBuffer;
egdaniel9a6cf802016-06-08 08:22:05 -0700160
161private:
162 static const int kInitialTrackedResourcesCount = 32;
163
Jim Van Verth5082df12020-03-11 16:14:51 -0400164 virtual void onReleaseResources() {}
165 virtual void onFreeGPUData(const GrVkGpu* gpu) const = 0;
jvanverth7ec92412016-07-06 09:24:57 -0700166
Greg Daniel6ecc9112017-06-16 16:17:03 +0000167 static constexpr uint32_t kMaxInputBuffers = 2;
168
Chris Dalton1d616352017-05-31 12:51:23 -0600169 VkBuffer fBoundInputBuffers[kMaxInputBuffers];
170 VkBuffer fBoundIndexBuffer;
egdaniel9a6cf802016-06-08 08:22:05 -0700171
egdaniel594739c2016-09-20 12:39:25 -0700172 // When resetting the command buffer, we remove the tracked resources from their arrays, and
173 // we prefer to not free all the memory every time so usually we just rewind. However, to avoid
174 // all arrays growing to the max size, after so many resets we'll do a full reset of the tracked
175 // resource arrays.
176 static const int kNumRewindResetsBeforeFullReset = 8;
Greg Daniel0addbdf2019-11-25 15:03:58 -0500177 int fNumResets = 0;
egdaniel594739c2016-09-20 12:39:25 -0700178
egdaniel9a6cf802016-06-08 08:22:05 -0700179 // Cached values used for dynamic state updates
180 VkViewport fCachedViewport;
181 VkRect2D fCachedScissor;
182 float fCachedBlendConstant[4];
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500183
Greg Danielee54f232019-04-03 14:58:40 -0400184 // Tracking of memory barriers so that we can submit them all in a batch together.
185 SkSTArray<4, VkBufferMemoryBarrier> fBufferBarriers;
186 SkSTArray<1, VkImageMemoryBarrier> fImageBarriers;
187 bool fBarriersByRegion = false;
188 VkPipelineStageFlags fSrcStageMask = 0;
189 VkPipelineStageFlags fDstStageMask = 0;
Greg Daniel0addbdf2019-11-25 15:03:58 -0500190
191 bool fIsWrapped;
egdaniel9a6cf802016-06-08 08:22:05 -0700192};
193
194class GrVkSecondaryCommandBuffer;
195
/**
 * A primary command buffer: can begin/end render passes, execute secondary
 * command buffers, record transfer/clear/copy commands outside a render pass,
 * and be submitted directly to a VkQueue with a fence for completion tracking.
 */
class GrVkPrimaryCommandBuffer : public GrVkCommandBuffer {
public:
    ~GrVkPrimaryCommandBuffer() override;

    // Allocates a new primary command buffer from `cmdPool`; returns nullptr on
    // failure (presumably — TODO confirm against the .cpp).
    static GrVkPrimaryCommandBuffer* Create(GrVkGpu* gpu, VkCommandPool cmdPool);

    // Begin/end recording on this command buffer.
    void begin(GrVkGpu* gpu);
    void end(GrVkGpu* gpu);

    // Begins render pass on this command buffer. The framebuffer from GrVkRenderTarget will be used
    // in the render pass.
    bool beginRenderPass(GrVkGpu* gpu,
                         const GrVkRenderPass* renderPass,
                         const VkClearValue clearValues[],
                         GrVkRenderTarget* target,
                         const SkIRect& bounds,
                         bool forSecondaryCB);
    void endRenderPass(const GrVkGpu* gpu);

    // Submits the SecondaryCommandBuffer into this command buffer. It is required that we are
    // currently inside a render pass that is compatible with the one used to create the
    // SecondaryCommandBuffer.
    void executeCommands(const GrVkGpu* gpu,
                         std::unique_ptr<GrVkSecondaryCommandBuffer> secondaryBuffer);

    // Commands that only work outside of a render pass
    void clearColorImage(const GrVkGpu* gpu,
                         GrVkImage* image,
                         const VkClearColorValue* color,
                         uint32_t subRangeCount,
                         const VkImageSubresourceRange* subRanges);

    void clearDepthStencilImage(const GrVkGpu* gpu,
                                GrVkImage* image,
                                const VkClearDepthStencilValue* color,
                                uint32_t subRangeCount,
                                const VkImageSubresourceRange* subRanges);

    void copyImage(const GrVkGpu* gpu,
                   GrVkImage* srcImage,
                   VkImageLayout srcLayout,
                   GrVkImage* dstImage,
                   VkImageLayout dstLayout,
                   uint32_t copyRegionCount,
                   const VkImageCopy* copyRegions);

    // Raw-handle overload: src/dst are bare VkImages; the GrManagedResources are
    // tracked so the backing memory outlives execution.
    void blitImage(const GrVkGpu* gpu,
                   const GrManagedResource* srcResource,
                   VkImage srcImage,
                   VkImageLayout srcLayout,
                   const GrManagedResource* dstResource,
                   VkImage dstImage,
                   VkImageLayout dstLayout,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    // Convenience overload taking GrVkImage wrappers.
    void blitImage(const GrVkGpu* gpu,
                   const GrVkImage& srcImage,
                   const GrVkImage& dstImage,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    void copyImageToBuffer(const GrVkGpu* gpu,
                           GrVkImage* srcImage,
                           VkImageLayout srcLayout,
                           GrVkTransferBuffer* dstBuffer,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBufferToImage(const GrVkGpu* gpu,
                           GrVkTransferBuffer* srcBuffer,
                           GrVkImage* dstImage,
                           VkImageLayout dstLayout,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBuffer(GrVkGpu* gpu,
                    GrVkBuffer* srcBuffer,
                    GrVkBuffer* dstBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy* regions);

    // Inline update of a buffer's contents (vkCmdUpdateBuffer semantics —
    // small `dataSize` payloads; verify limits against the caller).
    void updateBuffer(GrVkGpu* gpu,
                      GrVkBuffer* dstBuffer,
                      VkDeviceSize dstOffset,
                      VkDeviceSize dataSize,
                      const void* data);

    void resolveImage(GrVkGpu* gpu,
                      const GrVkImage& srcImage,
                      const GrVkImage& dstImage,
                      uint32_t regionCount,
                      const VkImageResolve* regions);

    // Submits this command buffer to `queue`, waiting on / signaling the given
    // semaphores. Returns false on failure.
    bool submitToQueue(GrVkGpu* gpu, VkQueue queue,
                       SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
                       SkTArray<GrVkSemaphore::Resource*>& waitSemaphores);

    // Blocks until the submit fence signals (see fSubmitFence).
    void forceSync(GrVkGpu* gpu);

    // Non-blocking check of whether the last submission has completed.
    bool finished(GrVkGpu* gpu);

    // Registers a callback to invoke once GPU execution finishes.
    void addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc);

    // Dropping the refs fires the GrRefCntedCallback completion callbacks.
    void callFinishedProcs() {
        fFinishedProcs.reset();
    }

    // Returns all executed secondary command buffers to `cmdPool` for reuse.
    void recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool);

private:
    explicit GrVkPrimaryCommandBuffer(VkCommandBuffer cmdBuffer)
        : INHERITED(cmdBuffer)
        , fSubmitFence(VK_NULL_HANDLE) {}

    void onFreeGPUData(const GrVkGpu* gpu) const override;

    void onReleaseResources() override;

    // Secondary buffers executed into this primary buffer; owned until recycled.
    SkTArray<std::unique_ptr<GrVkSecondaryCommandBuffer>, true> fSecondaryCommandBuffers;
    // Fence signaled when the last submission of this buffer completes.
    VkFence fSubmitFence;
    SkTArray<sk_sp<GrRefCntedCallback>> fFinishedProcs;

    typedef GrVkCommandBuffer INHERITED;
};
323
/**
 * A secondary command buffer: records commands inside a render pass and is
 * replayed into a GrVkPrimaryCommandBuffer via executeCommands(). May also wrap
 * an externally-created VkCommandBuffer that this class does not own.
 */
class GrVkSecondaryCommandBuffer : public GrVkCommandBuffer {
public:
    // Allocates a new secondary command buffer from `cmdPool`.
    static GrVkSecondaryCommandBuffer* Create(GrVkGpu* gpu, GrVkCommandPool* cmdPool);
    // Used for wrapping an external secondary command buffer.
    static GrVkSecondaryCommandBuffer* Create(VkCommandBuffer externalSecondaryCB);

    // Begins recording; `compatibleRenderPass` is the render pass this buffer
    // must be compatible with when executed.
    void begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
               const GrVkRenderPass* compatibleRenderPass);
    void end(GrVkGpu* gpu);

    // Returns this buffer to `cmdPool` for reuse.
    void recycle(GrVkCommandPool* cmdPool);

    // Raw handle accessor (e.g. for vkCmdExecuteCommands).
    VkCommandBuffer vkCommandBuffer() { return fCmdBuffer; }

private:
    explicit GrVkSecondaryCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped)
        : INHERITED(cmdBuffer, isWrapped) {}

    // Wrapped/pool-owned buffers have no GPU data of their own to free.
    void onFreeGPUData(const GrVkGpu* gpu) const override {}

    // Used for accessing fIsActive (on GrVkCommandBuffer)
    friend class GrVkPrimaryCommandBuffer;

    typedef GrVkCommandBuffer INHERITED;
};
Greg Daniel164a9f02016-02-22 09:56:40 -0500349
350#endif