/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkCommandBuffer_DEFINED
#define GrVkCommandBuffer_DEFINED

#include "include/gpu/vk/GrVkTypes.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkResource.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkUtil.h"

class GrVkBuffer;
class GrVkFramebuffer;
class GrVkIndexBuffer;
class GrVkImage;
class GrVkPipeline;
class GrVkPipelineState;
class GrVkRenderPass;
class GrVkRenderTarget;
class GrVkTransferBuffer;
class GrVkVertexBuffer;

class GrVkCommandBuffer {
public:
    virtual ~GrVkCommandBuffer() {}

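    // Resets the cached dynamic and bound-buffer state (input/index buffers, viewport, scissor,
    // blend constants) so that subsequent bind/set calls re-record their commands.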
    void invalidateState();

    ////////////////////////////////////////////////////////////////////////////
    // CommandBuffer commands
    ////////////////////////////////////////////////////////////////////////////
    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };

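    // Records a memory barrier. 'barrier' must point at a VkBufferMemoryBarrier or
    // VkImageMemoryBarrier matching 'barrierType'. Barriers are batched and flushed together by
    // submitPipelineBarriers() before the next command that does real work.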
    void pipelineBarrier(const GrVkGpu* gpu,
                         const GrVkResource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);

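    // Binds the vertex buffer at the given binding index, skipping the bind if that buffer is
    // already bound there.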
    void bindInputBuffer(GrVkGpu* gpu, uint32_t binding, const GrVkVertexBuffer* vbuffer);

    void bindIndexBuffer(GrVkGpu* gpu, const GrVkIndexBuffer* ibuffer);

    void bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline);

    void bindDescriptorSets(const GrVkGpu* gpu,
                            GrVkPipelineState*,
                            VkPipelineLayout layout,
                            uint32_t firstSet,
                            uint32_t setCount,
                            const VkDescriptorSet* descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t* dynamicOffsets);

    GrVkCommandPool* commandPool() { return fCmdPool; }

    void setViewport(const GrVkGpu* gpu,
                     uint32_t firstViewport,
                     uint32_t viewportCount,
                     const VkViewport* viewports);

    void setScissor(const GrVkGpu* gpu,
                    uint32_t firstScissor,
                    uint32_t scissorCount,
                    const VkRect2D* scissors);

    void setBlendConstants(const GrVkGpu* gpu, const float blendConstants[4]);

    // Commands that only work inside of a render pass
    void clearAttachments(const GrVkGpu* gpu,
                          int numAttachments,
                          const VkClearAttachment* attachments,
                          int numRects,
                          const VkClearRect* clearRects);

    void drawIndexed(const GrVkGpu* gpu,
                     uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);

    void draw(const GrVkGpu* gpu,
              uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);

    // Add ref-counted resource that will be tracked and released when this command buffer finishes
    // execution
    void addResource(const GrVkResource* resource) {
        resource->ref();
        resource->notifyAddedToCommandBuffer();
        fTrackedResources.append(1, &resource);
    }

    // Add ref-counted resource that will be tracked and released when this command buffer finishes
    // execution. When it is released, it will signal that the resource can be recycled for reuse.
    void addRecycledResource(const GrVkRecycledResource* resource) {
        resource->ref();
        resource->notifyAddedToCommandBuffer();
        fTrackedRecycledResources.append(1, &resource);
    }

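    // Releases the refs on all tracked resources once the GPU has finished with this command
    // buffer.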
    void releaseResources(GrVkGpu* gpu);

    void freeGPUData(GrVkGpu* gpu) const;
    void abandonGPUData() const;

    bool hasWork() const { return fHasWork; }

protected:
    GrVkCommandBuffer(VkCommandBuffer cmdBuffer, GrVkCommandPool* cmdPool,
                      const GrVkRenderPass* rp = nullptr)
            : fIsActive(false)
            , fActiveRenderPass(rp)
            , fCmdBuffer(cmdBuffer)
            , fCmdPool(cmdPool)
            , fNumResets(0) {
        fTrackedResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
        this->invalidateState();
    }

    bool isWrapped() const { return fCmdPool == nullptr; }

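    // Called when a command that does real work is recorded: flushes any pending pipeline
    // barriers and marks this command buffer as having work.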
    void addingWork(const GrVkGpu* gpu);

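    // Issues a single vkCmdPipelineBarrier for all barriers accumulated via pipelineBarrier().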
    void submitPipelineBarriers(const GrVkGpu* gpu);

    SkTDArray<const GrVkResource*>          fTrackedResources;
    SkTDArray<const GrVkRecycledResource*>  fTrackedRecycledResources;

    // Tracks whether we are in the middle of a command buffer begin/end pair and thus can add
    // new commands to the buffer.
    bool                      fIsActive;
    bool                      fHasWork = false;

    // Stores a pointer to the current active render pass (i.e. begin has been called but not
    // end). A nullptr means there is no active render pass. The GrVkCommandBuffer does not own
    // the render pass.
    const GrVkRenderPass*     fActiveRenderPass;

    VkCommandBuffer           fCmdBuffer;

    // Raw pointer, not refcounted. The command pool controls the command buffer's lifespan, so
    // it's guaranteed to outlive us.
    GrVkCommandPool*          fCmdPool;

private:
    static const int kInitialTrackedResourcesCount = 32;

    virtual void onReleaseResources(GrVkGpu* gpu) {}
    virtual void onFreeGPUData(GrVkGpu* gpu) const = 0;
    virtual void onAbandonGPUData() const = 0;

    static constexpr uint32_t kMaxInputBuffers = 2;

    VkBuffer fBoundInputBuffers[kMaxInputBuffers];
    VkBuffer fBoundIndexBuffer;

    // When resetting the command buffer, we remove the tracked resources from their arrays. We
    // prefer not to free all of the arrays' memory every time, so usually we just rewind them.
    // However, to keep the arrays from staying at their maximum size forever, after enough resets
    // we do a full reset of the tracked resource arrays.
    static const int kNumRewindResetsBeforeFullReset = 8;
    int              fNumResets;

    // Cached values used for dynamic state updates
    VkViewport fCachedViewport;
    VkRect2D   fCachedScissor;
    float      fCachedBlendConstant[4];

#ifdef SK_DEBUG
    mutable bool fResourcesReleased = false;
#endif
    // Tracking of memory barriers so that we can submit them all in a batch together.
    SkSTArray<4, VkBufferMemoryBarrier> fBufferBarriers;
    SkSTArray<1, VkImageMemoryBarrier>  fImageBarriers;
    bool                                fBarriersByRegion = false;
    VkPipelineStageFlags                fSrcStageMask = 0;
    VkPipelineStageFlags                fDstStageMask = 0;
};

class GrVkSecondaryCommandBuffer;

class GrVkPrimaryCommandBuffer : public GrVkCommandBuffer {
public:
    ~GrVkPrimaryCommandBuffer() override;

    static GrVkPrimaryCommandBuffer* Create(const GrVkGpu* gpu, GrVkCommandPool* cmdPool);

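    // Begins and ends recording of this primary command buffer.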
    void begin(GrVkGpu* gpu);
    void end(GrVkGpu* gpu);

    // Begins a render pass on this command buffer. The framebuffer from GrVkRenderTarget will be
    // used in the render pass.
    void beginRenderPass(const GrVkGpu* gpu,
                         const GrVkRenderPass* renderPass,
                         const VkClearValue clearValues[],
                         const GrVkRenderTarget& target,
                         const SkIRect& bounds,
                         bool forSecondaryCB);
    void endRenderPass(const GrVkGpu* gpu);

    // Submits the SecondaryCommandBuffer into this command buffer. It is required that we are
    // currently inside a render pass that is compatible with the one used to create the
    // SecondaryCommandBuffer.
    void executeCommands(const GrVkGpu* gpu,
                         std::unique_ptr<GrVkSecondaryCommandBuffer> secondaryBuffer);

    // Commands that only work outside of a render pass
    void clearColorImage(const GrVkGpu* gpu,
                         GrVkImage* image,
                         const VkClearColorValue* color,
                         uint32_t subRangeCount,
                         const VkImageSubresourceRange* subRanges);

    void clearDepthStencilImage(const GrVkGpu* gpu,
                                GrVkImage* image,
                                const VkClearDepthStencilValue* color,
                                uint32_t subRangeCount,
                                const VkImageSubresourceRange* subRanges);

    void copyImage(const GrVkGpu* gpu,
                   GrVkImage* srcImage,
                   VkImageLayout srcLayout,
                   GrVkImage* dstImage,
                   VkImageLayout dstLayout,
                   uint32_t copyRegionCount,
                   const VkImageCopy* copyRegions);

    void blitImage(const GrVkGpu* gpu,
                   const GrVkResource* srcResource,
                   VkImage srcImage,
                   VkImageLayout srcLayout,
                   const GrVkResource* dstResource,
                   VkImage dstImage,
                   VkImageLayout dstLayout,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    void blitImage(const GrVkGpu* gpu,
                   const GrVkImage& srcImage,
                   const GrVkImage& dstImage,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    void copyImageToBuffer(const GrVkGpu* gpu,
                           GrVkImage* srcImage,
                           VkImageLayout srcLayout,
                           GrVkTransferBuffer* dstBuffer,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBufferToImage(const GrVkGpu* gpu,
                           GrVkTransferBuffer* srcBuffer,
                           GrVkImage* dstImage,
                           VkImageLayout dstLayout,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBuffer(GrVkGpu* gpu,
                    GrVkBuffer* srcBuffer,
                    GrVkBuffer* dstBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy* regions);

    void updateBuffer(GrVkGpu* gpu,
                      GrVkBuffer* dstBuffer,
                      VkDeviceSize dstOffset,
                      VkDeviceSize dataSize,
                      const void* data);

    void resolveImage(GrVkGpu* gpu,
                      const GrVkImage& srcImage,
                      const GrVkImage& dstImage,
                      uint32_t regionCount,
                      const VkImageResolve* regions);

    void submitToQueue(GrVkGpu* gpu, VkQueue queue, GrVkGpu::SyncQueue sync,
                       SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
                       SkTArray<GrVkSemaphore::Resource*>& waitSemaphores);
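
    // Returns true once the most recent submission of this command buffer has finished executing
    // on the GPU (checks fSubmitFence).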
    bool finished(const GrVkGpu* gpu);

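    // Registers a callback to be invoked once this command buffer has finished executing on the
    // GPU.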
    void addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc);

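    // Returns the secondary command buffers that were executed as part of this primary buffer to
    // their command pool so they can be reused.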
    void recycleSecondaryCommandBuffers(GrVkGpu* gpu);

private:
    explicit GrVkPrimaryCommandBuffer(VkCommandBuffer cmdBuffer, GrVkCommandPool* cmdPool)
            : INHERITED(cmdBuffer, cmdPool)
            , fSubmitFence(VK_NULL_HANDLE) {}

    void onFreeGPUData(GrVkGpu* gpu) const override;

    void onAbandonGPUData() const override;

    void onReleaseResources(GrVkGpu* gpu) override;

    SkTArray<std::unique_ptr<GrVkSecondaryCommandBuffer>, true> fSecondaryCommandBuffers;
    VkFence                                                     fSubmitFence;
    SkTArray<sk_sp<GrRefCntedCallback>>                         fFinishedProcs;

    typedef GrVkCommandBuffer INHERITED;
};

class GrVkSecondaryCommandBuffer : public GrVkCommandBuffer {
public:
    static GrVkSecondaryCommandBuffer* Create(const GrVkGpu* gpu, GrVkCommandPool* cmdPool);
    // Used for wrapping an external secondary command buffer.
    static GrVkSecondaryCommandBuffer* Create(VkCommandBuffer externalSecondaryCB);

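    // Begins recording; the recorded commands will be executed inside a render pass compatible
    // with compatibleRenderPass.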
    void begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
               const GrVkRenderPass* compatibleRenderPass);
    void end(GrVkGpu* gpu);

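    // Returns this secondary command buffer to its command pool for reuse (a wrapped external
    // buffer, which has no pool, is simply freed).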
    void recycle(GrVkGpu* gpu);

    VkCommandBuffer vkCommandBuffer() { return fCmdBuffer; }

private:
    explicit GrVkSecondaryCommandBuffer(VkCommandBuffer cmdBuffer, GrVkCommandPool* cmdPool)
            : INHERITED(cmdBuffer, cmdPool) {}

    void onFreeGPUData(GrVkGpu* gpu) const override {}

    void onAbandonGPUData() const override {}

    friend class GrVkPrimaryCommandBuffer;

    typedef GrVkCommandBuffer INHERITED;
};

#endif