Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2015 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #ifndef GrVkCommandBuffer_DEFINED |
| 9 | #define GrVkCommandBuffer_DEFINED |
| 10 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 11 | #include "include/gpu/vk/GrVkTypes.h" |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 12 | #include "src/gpu/GrManagedResource.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 13 | #include "src/gpu/vk/GrVkGpu.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 14 | #include "src/gpu/vk/GrVkSemaphore.h" |
| 15 | #include "src/gpu/vk/GrVkUtil.h" |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 16 | |
// Forward declarations for Skia Vulkan backend types that this header only
// refers to by pointer or reference; their full definitions are not needed here.
class GrVkBuffer;
class GrVkFramebuffer;
class GrVkImage;
class GrVkMeshBuffer;
class GrVkPipeline;
class GrVkPipelineState;
class GrVkRenderPass;
class GrVkRenderTarget;
class GrVkTransferBuffer;
| 26 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 27 | class GrVkCommandBuffer { |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 28 | public: |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 29 | virtual ~GrVkCommandBuffer() {} |
| 30 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 31 | void invalidateState(); |
| 32 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 33 | //////////////////////////////////////////////////////////////////////////// |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 34 | // CommandBuffer commands |
| 35 | //////////////////////////////////////////////////////////////////////////// |
| 36 | enum BarrierType { |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 37 | kBufferMemory_BarrierType, |
| 38 | kImageMemory_BarrierType |
| 39 | }; |
| 40 | |
| 41 | void pipelineBarrier(const GrVkGpu* gpu, |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 42 | const GrManagedResource* resource, |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 43 | VkPipelineStageFlags srcStageMask, |
| 44 | VkPipelineStageFlags dstStageMask, |
| 45 | bool byRegion, |
| 46 | BarrierType barrierType, |
Greg Daniel | 59dc148 | 2019-02-22 10:46:38 -0500 | [diff] [blame] | 47 | void* barrier); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 48 | |
Chris Dalton | 10ee0b2 | 2020-04-02 16:28:52 -0600 | [diff] [blame] | 49 | void bindInputBuffer(GrVkGpu* gpu, uint32_t binding, const GrVkMeshBuffer* vbuffer); |
Chris Dalton | 1d61635 | 2017-05-31 12:51:23 -0600 | [diff] [blame] | 50 | |
Chris Dalton | 10ee0b2 | 2020-04-02 16:28:52 -0600 | [diff] [blame] | 51 | void bindIndexBuffer(GrVkGpu* gpu, const GrVkMeshBuffer* ibuffer); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 52 | |
egdaniel | 58a8d92 | 2016-04-21 08:03:10 -0700 | [diff] [blame] | 53 | void bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline); |
| 54 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 55 | void bindDescriptorSets(const GrVkGpu* gpu, |
egdaniel | 22281c1 | 2016-03-23 13:49:40 -0700 | [diff] [blame] | 56 | GrVkPipelineState*, |
Greg Daniel | eecc687 | 2019-07-29 13:21:37 -0400 | [diff] [blame] | 57 | VkPipelineLayout layout, |
egdaniel | bc9b296 | 2016-09-27 08:00:53 -0700 | [diff] [blame] | 58 | uint32_t firstSet, |
| 59 | uint32_t setCount, |
| 60 | const VkDescriptorSet* descriptorSets, |
| 61 | uint32_t dynamicOffsetCount, |
| 62 | const uint32_t* dynamicOffsets); |
| 63 | |
egdaniel | 470d77a | 2016-03-18 12:50:27 -0700 | [diff] [blame] | 64 | void setViewport(const GrVkGpu* gpu, |
| 65 | uint32_t firstViewport, |
| 66 | uint32_t viewportCount, |
| 67 | const VkViewport* viewports); |
| 68 | |
| 69 | void setScissor(const GrVkGpu* gpu, |
| 70 | uint32_t firstScissor, |
| 71 | uint32_t scissorCount, |
| 72 | const VkRect2D* scissors); |
| 73 | |
| 74 | void setBlendConstants(const GrVkGpu* gpu, const float blendConstants[4]); |
| 75 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 76 | // Commands that only work inside of a render pass |
| 77 | void clearAttachments(const GrVkGpu* gpu, |
| 78 | int numAttachments, |
| 79 | const VkClearAttachment* attachments, |
| 80 | int numRects, |
Greg Daniel | f346df3 | 2019-04-03 14:52:13 -0400 | [diff] [blame] | 81 | const VkClearRect* clearRects); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 82 | |
| 83 | void drawIndexed(const GrVkGpu* gpu, |
| 84 | uint32_t indexCount, |
| 85 | uint32_t instanceCount, |
| 86 | uint32_t firstIndex, |
| 87 | int32_t vertexOffset, |
Greg Daniel | f346df3 | 2019-04-03 14:52:13 -0400 | [diff] [blame] | 88 | uint32_t firstInstance); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 89 | |
| 90 | void draw(const GrVkGpu* gpu, |
| 91 | uint32_t vertexCount, |
| 92 | uint32_t instanceCount, |
| 93 | uint32_t firstVertex, |
Greg Daniel | f346df3 | 2019-04-03 14:52:13 -0400 | [diff] [blame] | 94 | uint32_t firstInstance); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 95 | |
Chris Dalton | 03fdf6a | 2020-04-07 12:31:59 -0600 | [diff] [blame] | 96 | void drawIndirect(const GrVkGpu* gpu, |
| 97 | const GrVkMeshBuffer* indirectBuffer, |
| 98 | VkDeviceSize offset, |
| 99 | uint32_t drawCount, |
| 100 | uint32_t stride); |
| 101 | |
| 102 | void drawIndexedIndirect(const GrVkGpu* gpu, |
| 103 | const GrVkMeshBuffer* indirectBuffer, |
| 104 | VkDeviceSize offset, |
| 105 | uint32_t drawCount, |
| 106 | uint32_t stride); |
| 107 | |
Greg Daniel | 7d918fd | 2018-06-19 15:22:01 -0400 | [diff] [blame] | 108 | // Add ref-counted resource that will be tracked and released when this command buffer finishes |
| 109 | // execution |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 110 | void addResource(const GrManagedResource* resource) { |
Greg Daniel | fa3adf7 | 2019-11-07 09:53:41 -0500 | [diff] [blame] | 111 | SkASSERT(resource); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 112 | resource->ref(); |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 113 | resource->notifyQueuedForWorkOnGpu(); |
egdaniel | 594739c | 2016-09-20 12:39:25 -0700 | [diff] [blame] | 114 | fTrackedResources.append(1, &resource); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 115 | } |
| 116 | |
egdaniel | c1be9bc | 2016-07-20 08:33:00 -0700 | [diff] [blame] | 117 | // Add ref-counted resource that will be tracked and released when this command buffer finishes |
| 118 | // execution. When it is released, it will signal that the resource can be recycled for reuse. |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 119 | void addRecycledResource(const GrRecycledResource* resource) { |
egdaniel | c1be9bc | 2016-07-20 08:33:00 -0700 | [diff] [blame] | 120 | resource->ref(); |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 121 | resource->notifyQueuedForWorkOnGpu(); |
egdaniel | 594739c | 2016-09-20 12:39:25 -0700 | [diff] [blame] | 122 | fTrackedRecycledResources.append(1, &resource); |
egdaniel | c1be9bc | 2016-07-20 08:33:00 -0700 | [diff] [blame] | 123 | } |
| 124 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 125 | void releaseResources(); |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 126 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 127 | void freeGPUData(const GrGpu* gpu, VkCommandPool pool) const; |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 128 | |
Robert Phillips | ce0a2bf | 2019-04-02 13:37:34 -0400 | [diff] [blame] | 129 | bool hasWork() const { return fHasWork; } |
| 130 | |
Greg Daniel | d922f33 | 2020-04-27 11:21:36 -0400 | [diff] [blame] | 131 | #ifdef SK_DEBUG |
| 132 | bool validateNoSharedImageResources(const GrVkCommandBuffer* other); |
| 133 | #endif |
| 134 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 135 | protected: |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 136 | GrVkCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped = false) |
| 137 | : fCmdBuffer(cmdBuffer) |
| 138 | , fIsWrapped(isWrapped) { |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 139 | fTrackedResources.setReserve(kInitialTrackedResourcesCount); |
| 140 | fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount); |
| 141 | this->invalidateState(); |
| 142 | } |
egdaniel | 594739c | 2016-09-20 12:39:25 -0700 | [diff] [blame] | 143 | |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 144 | bool isWrapped() const { return fIsWrapped; } |
Greg Daniel | 070cbaf | 2019-01-03 17:35:54 -0500 | [diff] [blame] | 145 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 146 | void addingWork(const GrVkGpu* gpu); |
Greg Daniel | ee54f23 | 2019-04-03 14:58:40 -0400 | [diff] [blame] | 147 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 148 | void submitPipelineBarriers(const GrVkGpu* gpu); |
Robert Phillips | ce0a2bf | 2019-04-02 13:37:34 -0400 | [diff] [blame] | 149 | |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 150 | SkTDArray<const GrManagedResource*> fTrackedResources; |
| 151 | SkTDArray<const GrRecycledResource*> fTrackedRecycledResources; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 152 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 153 | // Tracks whether we are in the middle of a command buffer begin/end calls and thus can add |
| 154 | // new commands to the buffer; |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 155 | bool fIsActive = false; |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 156 | bool fHasWork = false; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 157 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 158 | // Stores a pointer to the current active render pass (i.e. begin has been called but not |
| 159 | // end). A nullptr means there is no active render pass. The GrVKCommandBuffer does not own |
| 160 | // the render pass. |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 161 | const GrVkRenderPass* fActiveRenderPass = nullptr; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 162 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 163 | VkCommandBuffer fCmdBuffer; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 164 | |
| 165 | private: |
| 166 | static const int kInitialTrackedResourcesCount = 32; |
| 167 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 168 | virtual void onReleaseResources() {} |
| 169 | virtual void onFreeGPUData(const GrVkGpu* gpu) const = 0; |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 170 | |
Greg Daniel | 6ecc911 | 2017-06-16 16:17:03 +0000 | [diff] [blame] | 171 | static constexpr uint32_t kMaxInputBuffers = 2; |
| 172 | |
Chris Dalton | 1d61635 | 2017-05-31 12:51:23 -0600 | [diff] [blame] | 173 | VkBuffer fBoundInputBuffers[kMaxInputBuffers]; |
| 174 | VkBuffer fBoundIndexBuffer; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 175 | |
egdaniel | 594739c | 2016-09-20 12:39:25 -0700 | [diff] [blame] | 176 | // When resetting the command buffer, we remove the tracked resources from their arrays, and |
| 177 | // we prefer to not free all the memory every time so usually we just rewind. However, to avoid |
| 178 | // all arrays growing to the max size, after so many resets we'll do a full reset of the tracked |
| 179 | // resource arrays. |
| 180 | static const int kNumRewindResetsBeforeFullReset = 8; |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 181 | int fNumResets = 0; |
egdaniel | 594739c | 2016-09-20 12:39:25 -0700 | [diff] [blame] | 182 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 183 | // Cached values used for dynamic state updates |
| 184 | VkViewport fCachedViewport; |
| 185 | VkRect2D fCachedScissor; |
| 186 | float fCachedBlendConstant[4]; |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 187 | |
Greg Daniel | ee54f23 | 2019-04-03 14:58:40 -0400 | [diff] [blame] | 188 | // Tracking of memory barriers so that we can submit them all in a batch together. |
| 189 | SkSTArray<4, VkBufferMemoryBarrier> fBufferBarriers; |
| 190 | SkSTArray<1, VkImageMemoryBarrier> fImageBarriers; |
| 191 | bool fBarriersByRegion = false; |
| 192 | VkPipelineStageFlags fSrcStageMask = 0; |
| 193 | VkPipelineStageFlags fDstStageMask = 0; |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 194 | |
| 195 | bool fIsWrapped; |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 196 | }; |
| 197 | |
| 198 | class GrVkSecondaryCommandBuffer; |
| 199 | |
| 200 | class GrVkPrimaryCommandBuffer : public GrVkCommandBuffer { |
| 201 | public: |
egdaniel | 9cb6340 | 2016-06-23 08:37:05 -0700 | [diff] [blame] | 202 | ~GrVkPrimaryCommandBuffer() override; |
| 203 | |
Greg Daniel | 315c8dc | 2019-11-26 15:41:27 -0500 | [diff] [blame] | 204 | static GrVkPrimaryCommandBuffer* Create(GrVkGpu* gpu, VkCommandPool cmdPool); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 205 | |
Greg Daniel | e643da6 | 2019-11-05 12:36:42 -0500 | [diff] [blame] | 206 | void begin(GrVkGpu* gpu); |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 207 | void end(GrVkGpu* gpu); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 208 | |
| 209 | // Begins render pass on this command buffer. The framebuffer from GrVkRenderTarget will be used |
| 210 | // in the render pass. |
Greg Daniel | fa3adf7 | 2019-11-07 09:53:41 -0500 | [diff] [blame] | 211 | bool beginRenderPass(GrVkGpu* gpu, |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 212 | const GrVkRenderPass* renderPass, |
Robert Phillips | 9521447 | 2017-08-08 18:00:03 -0400 | [diff] [blame] | 213 | const VkClearValue clearValues[], |
Greg Daniel | fa3adf7 | 2019-11-07 09:53:41 -0500 | [diff] [blame] | 214 | GrVkRenderTarget* target, |
egdaniel | 9cb6340 | 2016-06-23 08:37:05 -0700 | [diff] [blame] | 215 | const SkIRect& bounds, |
| 216 | bool forSecondaryCB); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 217 | void endRenderPass(const GrVkGpu* gpu); |
| 218 | |
| 219 | // Submits the SecondaryCommandBuffer into this command buffer. It is required that we are |
| 220 | // currently inside a render pass that is compatible with the one used to create the |
| 221 | // SecondaryCommandBuffer. |
| 222 | void executeCommands(const GrVkGpu* gpu, |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 223 | std::unique_ptr<GrVkSecondaryCommandBuffer> secondaryBuffer); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 224 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 225 | // Commands that only work outside of a render pass |
| 226 | void clearColorImage(const GrVkGpu* gpu, |
| 227 | GrVkImage* image, |
| 228 | const VkClearColorValue* color, |
| 229 | uint32_t subRangeCount, |
| 230 | const VkImageSubresourceRange* subRanges); |
| 231 | |
egdaniel | 3d5d9ac | 2016-03-01 12:56:15 -0800 | [diff] [blame] | 232 | void clearDepthStencilImage(const GrVkGpu* gpu, |
| 233 | GrVkImage* image, |
| 234 | const VkClearDepthStencilValue* color, |
| 235 | uint32_t subRangeCount, |
| 236 | const VkImageSubresourceRange* subRanges); |
| 237 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 238 | void copyImage(const GrVkGpu* gpu, |
| 239 | GrVkImage* srcImage, |
| 240 | VkImageLayout srcLayout, |
| 241 | GrVkImage* dstImage, |
| 242 | VkImageLayout dstLayout, |
| 243 | uint32_t copyRegionCount, |
| 244 | const VkImageCopy* copyRegions); |
| 245 | |
egdaniel | 17b8925 | 2016-04-05 07:23:38 -0700 | [diff] [blame] | 246 | void blitImage(const GrVkGpu* gpu, |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 247 | const GrManagedResource* srcResource, |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 248 | VkImage srcImage, |
egdaniel | 17b8925 | 2016-04-05 07:23:38 -0700 | [diff] [blame] | 249 | VkImageLayout srcLayout, |
Jim Van Verth | 3e19216 | 2020-03-10 16:23:16 -0400 | [diff] [blame] | 250 | const GrManagedResource* dstResource, |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 251 | VkImage dstImage, |
egdaniel | 17b8925 | 2016-04-05 07:23:38 -0700 | [diff] [blame] | 252 | VkImageLayout dstLayout, |
| 253 | uint32_t blitRegionCount, |
| 254 | const VkImageBlit* blitRegions, |
| 255 | VkFilter filter); |
| 256 | |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 257 | void blitImage(const GrVkGpu* gpu, |
| 258 | const GrVkImage& srcImage, |
| 259 | const GrVkImage& dstImage, |
| 260 | uint32_t blitRegionCount, |
| 261 | const VkImageBlit* blitRegions, |
Greg Daniel | 6ecc911 | 2017-06-16 16:17:03 +0000 | [diff] [blame] | 262 | VkFilter filter); |
egdaniel | b2df0c2 | 2016-05-13 11:30:37 -0700 | [diff] [blame] | 263 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 264 | void copyImageToBuffer(const GrVkGpu* gpu, |
| 265 | GrVkImage* srcImage, |
| 266 | VkImageLayout srcLayout, |
| 267 | GrVkTransferBuffer* dstBuffer, |
| 268 | uint32_t copyRegionCount, |
| 269 | const VkBufferImageCopy* copyRegions); |
| 270 | |
| 271 | void copyBufferToImage(const GrVkGpu* gpu, |
| 272 | GrVkTransferBuffer* srcBuffer, |
| 273 | GrVkImage* dstImage, |
| 274 | VkImageLayout dstLayout, |
| 275 | uint32_t copyRegionCount, |
| 276 | const VkBufferImageCopy* copyRegions); |
| 277 | |
Greg Daniel | 6888c0d | 2017-08-25 11:55:50 -0400 | [diff] [blame] | 278 | void copyBuffer(GrVkGpu* gpu, |
| 279 | GrVkBuffer* srcBuffer, |
| 280 | GrVkBuffer* dstBuffer, |
| 281 | uint32_t regionCount, |
| 282 | const VkBufferCopy* regions); |
| 283 | |
jvanverth | a584de9 | 2016-06-30 09:10:52 -0700 | [diff] [blame] | 284 | void updateBuffer(GrVkGpu* gpu, |
| 285 | GrVkBuffer* dstBuffer, |
| 286 | VkDeviceSize dstOffset, |
| 287 | VkDeviceSize dataSize, |
| 288 | const void* data); |
| 289 | |
egdaniel | 52ad251 | 2016-08-04 12:50:01 -0700 | [diff] [blame] | 290 | void resolveImage(GrVkGpu* gpu, |
| 291 | const GrVkImage& srcImage, |
| 292 | const GrVkImage& dstImage, |
| 293 | uint32_t regionCount, |
| 294 | const VkImageResolve* regions); |
| 295 | |
Greg Daniel | e118558 | 2019-12-04 11:29:44 -0500 | [diff] [blame] | 296 | bool submitToQueue(GrVkGpu* gpu, VkQueue queue, |
Greg Daniel | 48661b8 | 2018-01-22 16:11:35 -0500 | [diff] [blame] | 297 | SkTArray<GrVkSemaphore::Resource*>& signalSemaphores, |
| 298 | SkTArray<GrVkSemaphore::Resource*>& waitSemaphores); |
Greg Daniel | e118558 | 2019-12-04 11:29:44 -0500 | [diff] [blame] | 299 | |
| 300 | void forceSync(GrVkGpu* gpu); |
| 301 | |
| 302 | bool finished(GrVkGpu* gpu); |
Greg Daniel | a3aa75a | 2019-04-12 14:24:55 -0400 | [diff] [blame] | 303 | |
| 304 | void addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc); |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 305 | |
Greg Daniel | fe15962 | 2020-04-10 17:43:51 +0000 | [diff] [blame] | 306 | void callFinishedProcs() { |
| 307 | fFinishedProcs.reset(); |
| 308 | } |
| 309 | |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 310 | void recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool); |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 311 | |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 312 | private: |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 313 | explicit GrVkPrimaryCommandBuffer(VkCommandBuffer cmdBuffer) |
| 314 | : INHERITED(cmdBuffer) |
egdaniel | 9cb6340 | 2016-06-23 08:37:05 -0700 | [diff] [blame] | 315 | , fSubmitFence(VK_NULL_HANDLE) {} |
| 316 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 317 | void onFreeGPUData(const GrVkGpu* gpu) const override; |
egdaniel | 9cb6340 | 2016-06-23 08:37:05 -0700 | [diff] [blame] | 318 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 319 | void onReleaseResources() override; |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 320 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 321 | SkTArray<std::unique_ptr<GrVkSecondaryCommandBuffer>, true> fSecondaryCommandBuffers; |
| 322 | VkFence fSubmitFence; |
| 323 | SkTArray<sk_sp<GrRefCntedCallback>> fFinishedProcs; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 324 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 325 | typedef GrVkCommandBuffer INHERITED; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 326 | }; |
| 327 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 328 | class GrVkSecondaryCommandBuffer : public GrVkCommandBuffer { |
| 329 | public: |
Greg Daniel | 315c8dc | 2019-11-26 15:41:27 -0500 | [diff] [blame] | 330 | static GrVkSecondaryCommandBuffer* Create(GrVkGpu* gpu, GrVkCommandPool* cmdPool); |
Greg Daniel | 070cbaf | 2019-01-03 17:35:54 -0500 | [diff] [blame] | 331 | // Used for wrapping an external secondary command buffer. |
| 332 | static GrVkSecondaryCommandBuffer* Create(VkCommandBuffer externalSecondaryCB); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 333 | |
Greg Daniel | e643da6 | 2019-11-05 12:36:42 -0500 | [diff] [blame] | 334 | void begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer, |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 335 | const GrVkRenderPass* compatibleRenderPass); |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 336 | void end(GrVkGpu* gpu); |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 337 | |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 338 | void recycle(GrVkCommandPool* cmdPool); |
Greg Daniel | 64cc9aa | 2018-10-19 13:54:56 -0400 | [diff] [blame] | 339 | |
Greg Daniel | 8daf3b7 | 2019-07-30 09:57:26 -0400 | [diff] [blame] | 340 | VkCommandBuffer vkCommandBuffer() { return fCmdBuffer; } |
jvanverth | 7ec9241 | 2016-07-06 09:24:57 -0700 | [diff] [blame] | 341 | |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 342 | private: |
Greg Daniel | 0addbdf | 2019-11-25 15:03:58 -0500 | [diff] [blame] | 343 | explicit GrVkSecondaryCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped) |
| 344 | : INHERITED(cmdBuffer, isWrapped) {} |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 345 | |
Jim Van Verth | 5082df1 | 2020-03-11 16:14:51 -0400 | [diff] [blame] | 346 | void onFreeGPUData(const GrVkGpu* gpu) const override {} |
Ethan Nicholas | 8e265a7 | 2018-12-12 16:22:40 -0500 | [diff] [blame] | 347 | |
Greg Daniel | 315c8dc | 2019-11-26 15:41:27 -0500 | [diff] [blame] | 348 | // Used for accessing fIsActive (on GrVkCommandBuffer) |
egdaniel | 9a6cf80 | 2016-06-08 08:22:05 -0700 | [diff] [blame] | 349 | friend class GrVkPrimaryCommandBuffer; |
| 350 | |
| 351 | typedef GrVkCommandBuffer INHERITED; |
| 352 | }; |
Greg Daniel | 164a9f0 | 2016-02-22 09:56:40 -0500 | [diff] [blame] | 353 | |
| 354 | #endif |