/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrVkCommandBuffer_DEFINED
#define GrVkCommandBuffer_DEFINED

#include "include/gpu/vk/GrVkTypes.h"
#include "src/gpu/GrManagedResource.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkUtil.h"

class GrVkBuffer;
class GrVkFramebuffer;
class GrVkImage;
class GrVkMeshBuffer;
class GrVkPipeline;
class GrVkPipelineState;
class GrVkRenderPass;
class GrVkRenderTarget;
class GrVkTransferBuffer;

class GrVkCommandBuffer {
public:
    virtual ~GrVkCommandBuffer() {}

    void invalidateState();

    ////////////////////////////////////////////////////////////////////////////
    // CommandBuffer commands
    ////////////////////////////////////////////////////////////////////////////
    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };

    void pipelineBarrier(const GrVkGpu* gpu,
                         const GrManagedResource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);
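
    // Illustrative sketch (not part of the original header), assuming a GrVkCommandBuffer*
    // named cmdBuffer and a const GrManagedResource* for the image: a caller that wants to
    // record an image layout transition fills out a VkImageMemoryBarrier and passes it with
    // kImageMemory_BarrierType, e.g.
    //
    //   VkImageMemoryBarrier barrier = {};
    //   barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    //   barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    //   barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    //   barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    //   barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    //   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //   barrier.image = vkImage;  // assumed VkImage handle
    //   barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    //   cmdBuffer->pipelineBarrier(gpu, imageResource,
    //                              VK_PIPELINE_STAGE_TRANSFER_BIT,
    //                              VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    //                              /*byRegion=*/false,
    //                              GrVkCommandBuffer::kImageMemory_BarrierType, &barrier);
    //
    // Barriers are batched (see fBufferBarriers/fImageBarriers below) and flushed via
    // submitPipelineBarriers() before commands that depend on them are recorded.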

    void bindInputBuffer(GrVkGpu* gpu, uint32_t binding, sk_sp<const GrBuffer> buffer);

    void bindIndexBuffer(GrVkGpu* gpu, sk_sp<const GrBuffer> buffer);

    void bindPipeline(const GrVkGpu* gpu, const GrVkPipeline* pipeline);

    void bindDescriptorSets(const GrVkGpu* gpu,
                            GrVkPipelineState*,
                            VkPipelineLayout layout,
                            uint32_t firstSet,
                            uint32_t setCount,
                            const VkDescriptorSet* descriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t* dynamicOffsets);

    void setViewport(const GrVkGpu* gpu,
                     uint32_t firstViewport,
                     uint32_t viewportCount,
                     const VkViewport* viewports);

    void setScissor(const GrVkGpu* gpu,
                    uint32_t firstScissor,
                    uint32_t scissorCount,
                    const VkRect2D* scissors);

    void setBlendConstants(const GrVkGpu* gpu, const float blendConstants[4]);

    // Commands that only work inside of a render pass
    void clearAttachments(const GrVkGpu* gpu,
                          int numAttachments,
                          const VkClearAttachment* attachments,
                          int numRects,
                          const VkClearRect* clearRects);

    void drawIndexed(const GrVkGpu* gpu,
                     uint32_t indexCount,
                     uint32_t instanceCount,
                     uint32_t firstIndex,
                     int32_t vertexOffset,
                     uint32_t firstInstance);

    void draw(const GrVkGpu* gpu,
              uint32_t vertexCount,
              uint32_t instanceCount,
              uint32_t firstVertex,
              uint32_t firstInstance);

    void drawIndirect(const GrVkGpu* gpu,
                      const GrVkMeshBuffer* indirectBuffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride);

    void drawIndexedIndirect(const GrVkGpu* gpu,
                             const GrVkMeshBuffer* indirectBuffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride);
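
    // Illustrative sketch (not part of the original header), assuming cmdBuffer, gpu, and the
    // bound objects below were created elsewhere: a typical indexed draw inside a render pass
    // is recorded roughly as
    //
    //   cmdBuffer->bindPipeline(gpu, pipeline);
    //   cmdBuffer->bindDescriptorSets(gpu, pipelineState, layout, 0, 1, &descSet, 0, nullptr);
    //   cmdBuffer->bindInputBuffer(gpu, 0, vertexBuffer);
    //   cmdBuffer->bindIndexBuffer(gpu, indexBuffer);
    //   cmdBuffer->drawIndexed(gpu, indexCount, 1, 0, 0, 0);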

    // Add a ref-counted resource that will be tracked and released when this command buffer
    // finishes execution.
    void addResource(const GrManagedResource* resource) {
        SkASSERT(resource);
        resource->ref();
        resource->notifyQueuedForWorkOnGpu();
        fTrackedResources.append(1, &resource);
    }

    // Add a ref-counted resource that will be tracked and released when this command buffer
    // finishes execution. When it is released, it will signal that the resource can be recycled
    // for reuse.
    void addRecycledResource(const GrRecycledResource* resource) {
        resource->ref();
        resource->notifyQueuedForWorkOnGpu();
        fTrackedRecycledResources.append(1, &resource);
    }

    void addGrBuffer(sk_sp<const GrBuffer> buffer) {
        fTrackedGpuBuffers.push_back(std::move(buffer));
    }
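
    // Illustrative sketch (not part of the original header): code that records a command
    // referencing a Vulkan-backed object typically registers that object here so it cannot be
    // deleted while the GPU may still be using it, e.g.
    //
    //   cmdBuffer->addResource(managedResource);      // managedResource is an assumed
    //                                                 // const GrManagedResource*
    //   cmdBuffer->addRecycledResource(recycledRes);  // recycledRes is an assumed
    //                                                 // const GrRecycledResource*
    //   cmdBuffer->addGrBuffer(buffer);               // buffer is an assumed
    //                                                 // sk_sp<const GrBuffer>
    //
    // The tracked objects are released again (see releaseResources()) once the GPU has
    // finished with the command buffer.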

    void releaseResources();

    void freeGPUData(const GrGpu* gpu, VkCommandPool pool) const;

    bool hasWork() const { return fHasWork; }

protected:
    GrVkCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped = false)
        : fCmdBuffer(cmdBuffer)
        , fIsWrapped(isWrapped) {
        fTrackedResources.setReserve(kInitialTrackedResourcesCount);
        fTrackedRecycledResources.setReserve(kInitialTrackedResourcesCount);
        this->invalidateState();
    }

    bool isWrapped() const { return fIsWrapped; }

    void addingWork(const GrVkGpu* gpu);

    void submitPipelineBarriers(const GrVkGpu* gpu, bool forSelfDependency = false);

    SkTDArray<const GrManagedResource*>   fTrackedResources;
    SkTDArray<const GrRecycledResource*>  fTrackedRecycledResources;
    SkSTArray<16, sk_sp<const GrBuffer>>  fTrackedGpuBuffers;

    // Tracks whether we are in the middle of a command buffer begin/end pair and thus can add
    // new commands to the buffer.
    bool fIsActive = false;
    bool fHasWork = false;

    // Stores a pointer to the currently active render pass (i.e. begin has been called but not
    // end). A nullptr means there is no active render pass. The GrVkCommandBuffer does not own
    // the render pass.
    const GrVkRenderPass* fActiveRenderPass = nullptr;

    VkCommandBuffer fCmdBuffer;

private:
    static const int kInitialTrackedResourcesCount = 32;

    virtual void onReleaseResources() {}
    virtual void onFreeGPUData(const GrVkGpu* gpu) const = 0;

    static constexpr uint32_t kMaxInputBuffers = 2;

    VkBuffer fBoundInputBuffers[kMaxInputBuffers];
    VkBuffer fBoundIndexBuffer;

    // When resetting the command buffer, we remove the tracked resources from their arrays. We
    // prefer not to free all of the memory every time, so usually we just rewind the arrays.
    // However, to keep the arrays from growing to their maximum size indefinitely, we do a full
    // reset of the tracked resource arrays after a fixed number of rewind resets.
    static const int kNumRewindResetsBeforeFullReset = 8;
    int fNumResets = 0;

    // Cached values used for dynamic state updates
    VkViewport fCachedViewport;
    VkRect2D   fCachedScissor;
    float      fCachedBlendConstant[4];

    // Tracking of memory barriers so that we can submit them all in a batch together.
    SkSTArray<4, VkBufferMemoryBarrier> fBufferBarriers;
    SkSTArray<1, VkImageMemoryBarrier>  fImageBarriers;
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    bool fIsWrapped;
};

class GrVkSecondaryCommandBuffer;

class GrVkPrimaryCommandBuffer : public GrVkCommandBuffer {
public:
    ~GrVkPrimaryCommandBuffer() override;

    static GrVkPrimaryCommandBuffer* Create(GrVkGpu* gpu, VkCommandPool cmdPool);

    void begin(GrVkGpu* gpu);
    void end(GrVkGpu* gpu);

    // Begins a render pass on this command buffer. The framebuffer from the GrVkRenderTarget
    // will be used in the render pass.
    bool beginRenderPass(GrVkGpu* gpu,
                         const GrVkRenderPass* renderPass,
                         const VkClearValue clearValues[],
                         GrVkRenderTarget* target,
                         const SkIRect& bounds,
                         bool forSecondaryCB);
    void endRenderPass(const GrVkGpu* gpu);

    // Submits the SecondaryCommandBuffer into this command buffer. It is required that we are
    // currently inside a render pass that is compatible with the one used to create the
    // SecondaryCommandBuffer.
    void executeCommands(const GrVkGpu* gpu,
                         std::unique_ptr<GrVkSecondaryCommandBuffer> secondaryBuffer);
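
    // Illustrative sketch (not part of the original header), assuming primary, gpu, renderPass,
    // clearValues, target, bounds, and secondary were set up elsewhere: drawing through a
    // secondary command buffer follows roughly this sequence on the primary buffer:
    //
    //   if (primary->beginRenderPass(gpu, renderPass, clearValues, target, bounds,
    //                                /*forSecondaryCB=*/true)) {
    //       primary->executeCommands(gpu, std::move(secondary));
    //       primary->endRenderPass(gpu);
    //   }
    //
    // The secondary buffer must have been begun with a render pass compatible with renderPass.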

    // Commands that only work outside of a render pass
    void clearColorImage(const GrVkGpu* gpu,
                         GrVkImage* image,
                         const VkClearColorValue* color,
                         uint32_t subRangeCount,
                         const VkImageSubresourceRange* subRanges);

    void clearDepthStencilImage(const GrVkGpu* gpu,
                                GrVkImage* image,
                                const VkClearDepthStencilValue* value,
                                uint32_t subRangeCount,
                                const VkImageSubresourceRange* subRanges);

    void copyImage(const GrVkGpu* gpu,
                   GrVkImage* srcImage,
                   VkImageLayout srcLayout,
                   GrVkImage* dstImage,
                   VkImageLayout dstLayout,
                   uint32_t copyRegionCount,
                   const VkImageCopy* copyRegions);

    void blitImage(const GrVkGpu* gpu,
                   const GrManagedResource* srcResource,
                   VkImage srcImage,
                   VkImageLayout srcLayout,
                   const GrManagedResource* dstResource,
                   VkImage dstImage,
                   VkImageLayout dstLayout,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    void blitImage(const GrVkGpu* gpu,
                   const GrVkImage& srcImage,
                   const GrVkImage& dstImage,
                   uint32_t blitRegionCount,
                   const VkImageBlit* blitRegions,
                   VkFilter filter);

    void copyImageToBuffer(const GrVkGpu* gpu,
                           GrVkImage* srcImage,
                           VkImageLayout srcLayout,
                           GrVkTransferBuffer* dstBuffer,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBufferToImage(const GrVkGpu* gpu,
                           GrVkTransferBuffer* srcBuffer,
                           GrVkImage* dstImage,
                           VkImageLayout dstLayout,
                           uint32_t copyRegionCount,
                           const VkBufferImageCopy* copyRegions);

    void copyBuffer(GrVkGpu* gpu,
                    GrVkBuffer* srcBuffer,
                    GrVkBuffer* dstBuffer,
                    uint32_t regionCount,
                    const VkBufferCopy* regions);

    void updateBuffer(GrVkGpu* gpu,
                      GrVkBuffer* dstBuffer,
                      VkDeviceSize dstOffset,
                      VkDeviceSize dataSize,
                      const void* data);

    void resolveImage(GrVkGpu* gpu,
                      const GrVkImage& srcImage,
                      const GrVkImage& dstImage,
                      uint32_t regionCount,
                      const VkImageResolve* regions);

    bool submitToQueue(GrVkGpu* gpu, VkQueue queue,
                       SkTArray<GrVkSemaphore::Resource*>& signalSemaphores,
                       SkTArray<GrVkSemaphore::Resource*>& waitSemaphores);

    void forceSync(GrVkGpu* gpu);

    bool finished(GrVkGpu* gpu);

    void addFinishedProc(sk_sp<GrRefCntedCallback> finishedProc);

    void callFinishedProcs() {
        fFinishedProcs.reset();
    }
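
    // Illustrative sketch (not part of the original header), assuming gpu, queue, and the
    // semaphore arrays were set up by the caller: the recording/submission flow is roughly
    //
    //   primary->end(gpu);
    //   primary->submitToQueue(gpu, queue, signalSemaphores, waitSemaphores);
    //   ...
    //   if (primary->finished(gpu)) {
    //       primary->callFinishedProcs();
    //       primary->releaseResources();
    //   }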

    void recycleSecondaryCommandBuffers(GrVkCommandPool* cmdPool);

private:
    explicit GrVkPrimaryCommandBuffer(VkCommandBuffer cmdBuffer)
        : INHERITED(cmdBuffer)
        , fSubmitFence(VK_NULL_HANDLE) {}

    void onFreeGPUData(const GrVkGpu* gpu) const override;

    void onReleaseResources() override;

    SkTArray<std::unique_ptr<GrVkSecondaryCommandBuffer>, true> fSecondaryCommandBuffers;
    VkFence                                                     fSubmitFence;
    SkTArray<sk_sp<GrRefCntedCallback>>                         fFinishedProcs;

    using INHERITED = GrVkCommandBuffer;
};

class GrVkSecondaryCommandBuffer : public GrVkCommandBuffer {
public:
    static GrVkSecondaryCommandBuffer* Create(GrVkGpu* gpu, GrVkCommandPool* cmdPool);
    // Used for wrapping an external secondary command buffer.
    static GrVkSecondaryCommandBuffer* Create(VkCommandBuffer externalSecondaryCB);

    void begin(GrVkGpu* gpu, const GrVkFramebuffer* framebuffer,
               const GrVkRenderPass* compatibleRenderPass);
    void end(GrVkGpu* gpu);

    void recycle(GrVkCommandPool* cmdPool);

    VkCommandBuffer vkCommandBuffer() { return fCmdBuffer; }

private:
    explicit GrVkSecondaryCommandBuffer(VkCommandBuffer cmdBuffer, bool isWrapped)
        : INHERITED(cmdBuffer, isWrapped) {}

    void onFreeGPUData(const GrVkGpu* gpu) const override {}

    // Used for accessing fIsActive (on GrVkCommandBuffer)
    friend class GrVkPrimaryCommandBuffer;

    using INHERITED = GrVkCommandBuffer;
};

#endif