/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkGpuCommandBuffer.h"

#include "include/core/SkDrawable.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrBackendDrawableInfo.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkResourceProvider.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkTexture.h"

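// The GrVkPrimaryCommandBufferTask subclasses below capture work (inline uploads, surface
// copies, and transfers out of a surface) that must happen outside of a render pass. They are
// queued up while a command buffer is being recorded and replayed via execute() when submit()
// runs.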
GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;

namespace {

class InlineUpload : public GrVkPrimaryCommandBufferTask {
public:
    InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
            : fFlushState(state), fUpload(upload) {}

    void execute(const Args& args) override { fFlushState->doUpload(fUpload); }

private:
    GrOpFlushState* fFlushState;
    GrDeferredTextureUploadFn fUpload;
};

class Copy : public GrVkPrimaryCommandBufferTask {
public:
    Copy(GrSurface* src, const SkIRect& srcRect, const SkIPoint& dstPoint, bool shouldDiscardDst)
            : fSrc(src)
            , fSrcRect(srcRect)
            , fDstPoint(dstPoint)
            , fShouldDiscardDst(shouldDiscardDst) {}

    void execute(const Args& args) override {
        args.fGpu->copySurface(args.fSurface, fSrc.get(), fSrcRect, fDstPoint, fShouldDiscardDst);
    }

private:
    using Src = GrPendingIOResource<GrSurface, kRead_GrIOType>;
    Src fSrc;
    SkIRect fSrcRect;
    SkIPoint fDstPoint;
    bool fShouldDiscardDst;
};

class TransferFrom : public GrVkPrimaryCommandBufferTask {
public:
    TransferFrom(const SkIRect& srcRect, GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
                 size_t offset)
            : fTransferBuffer(sk_ref_sp(transferBuffer))
            , fOffset(offset)
            , fSrcRect(srcRect)
            , fBufferColorType(bufferColorType) {}

    void execute(const Args& args) override {
        args.fGpu->transferPixelsFrom(args.fSurface, fSrcRect.fLeft, fSrcRect.fTop,
                                      fSrcRect.width(), fSrcRect.height(), fBufferColorType,
                                      fTransferBuffer.get(), fOffset);
    }

private:
    sk_sp<GrGpuBuffer> fTransferBuffer;
    size_t fOffset;
    SkIRect fSrcRect;
    GrColorType fBufferColorType;
};

} // anonymous namespace

/////////////////////////////////////////////////////////////////////////////

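// GrVkGpuTextureCommandBuffer only queues tasks; no Vulkan commands are recorded until submit()
// executes them against the target texture.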
void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    fTasks.emplace<Copy>(src, srcRect, dstPoint, false);
}

void GrVkGpuTextureCommandBuffer::transferFrom(const SkIRect& srcRect, GrColorType bufferColorType,
                                               GrGpuBuffer* transferBuffer, size_t offset) {
    fTasks.emplace<TransferFrom>(srcRect, bufferColorType, transferBuffer, offset);
}

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuTextureCommandBuffer::submit() {
    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fTexture};
    for (auto& task : fTasks) {
        task.execute(taskArgs);
    }
}

////////////////////////////////////////////////////////////////////////////////

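// Translates Ganesh load/store ops into the Vulkan attachment ops used to find or build a
// compatible GrVkRenderPass.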
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu) : fGpu(gpu) {}

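// Sets up the first CommandBufferInfo for this render target: finds a render pass matching the
// requested load/store ops, seeds the clear value and bounds, and begins a pooled secondary
// command buffer for the work that follows.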
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

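// Variant of init() for render targets that wrap an externally supplied secondary command buffer
// (see GrVkRenderTarget::wrapsSecondaryCommandBuffer()); the external render pass and command
// buffer are reused rather than created here.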
void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffers.push_back(vkRT->getExternalSecondaryCommandBuffer());
    cbInfo.fCommandBuffers[0]->ref();
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

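// Replays any queued pre-command-buffer tasks, performs the image layout transitions the render
// pass expects (color attachment, stencil, and sampled textures), and then hands each recorded
// secondary command buffer to the GPU inside its render pass.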
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should have only been one secondary command buffer in the wrapped case so it
            // is safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard against write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                                VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                                  VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(cbInfo.fCommandBuffers, cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}

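// Binds this command buffer to a render target for one round of recording. The GrLoadOp/GrStoreOp
// pairs are converted to Vulkan attachment ops up front; wrapped render targets skip straight to
// initWrapped().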
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        for (int j = 0; j < cbInfo.fCommandBuffers.count(); ++j) {
            cbInfo.fCommandBuffers[j]->unref(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();
    fPreCommandBufferTasks.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;

#ifdef SK_DEBUG
    fIsActive = false;
#endif
}

bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

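// If nothing has been recorded yet, discard() swaps the render pass for one that loads both the
// color and stencil attachments with DONT_CARE, so the previous contents are never read.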
void GrVkGpuRTCommandBuffer::discard() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (cbInfo.fIsEmpty) {
        // Change the render pass to do a don't-care load for both color & stencil
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }
}

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

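// Clears the stencil attachment via clearAttachments on the current secondary command buffer,
// honoring the scissor (flipped for bottom-left origins) and using the "inside mask" bit pattern
// when requested.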
void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

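// Full-target clears on an empty command buffer are folded into the render pass as a
// VK_ATTACHMENT_LOAD_OP_CLEAR; otherwise the clear is recorded with clearAttachments, restricted
// to the scissor when one is enabled.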
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

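// Helpers for splitting work: addAdditionalCommandBuffer() starts a new secondary command buffer
// that continues the current render pass, while addAdditionalRenderPass() closes the current pass
// and begins a new one that loads and stores both attachments.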
void GrVkGpuRTCommandBuffer::addAdditionalCommandBuffer() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    cbInfo.currentCmdBuf()->end(fGpu);
    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

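// Inline uploads must run outside of a render pass, so the upload is queued as a
// pre-command-buffer task and any work already recorded is pushed into its own render pass first.
// The same pattern applies to the copy() and transferFrom() overrides below.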
void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

void GrVkGpuRTCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<Copy>(
            src, srcRect, dstPoint, LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    }
}

void GrVkGpuRTCommandBuffer::transferFrom(const SkIRect& srcRect, GrColorType bufferColorType,
                                          GrGpuBuffer* transferBuffer, size_t offset) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty) {
        this->addAdditionalRenderPass();
    }
    fPreCommandBufferTasks.emplace<TransferFrom>(srcRect, bufferColorType, transferBuffer, offset);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

////////////////////////////////////////////////////////////////////////////////

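// Binds the index, vertex, and instance buffers on the current secondary command buffer; binding
// numbers must line up with the vertex-then-instance order baked into GrVkPipeline.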
void GrVkGpuRTCommandBuffer::bindGeometry(const GrGpuBuffer* indexBuffer,
                                          const GrGpuBuffer* vertexBuffer,
                                          const GrGpuBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to put any memory barriers to make sure host writes have finished here.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }
    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

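// Looks up (or creates) a GrVkPipelineState compatible with the current render pass, binds the
// pipeline, uniforms, textures, and dynamic state, and starts a fresh secondary command buffer if
// the driver requires one on pipeline changes (GrVkCaps::newCBOnPipelineChange()).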
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    if (!cbInfo.fIsEmpty &&
        fLastPipelineState && fLastPipelineState != pipelineState &&
        fGpu->vkCaps().newCBOnPipelineChange()) {
        this->addAdditionalCommandBuffer();
    }
    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               fRenderTarget->config(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

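// Prepares every texture the draw samples (resolving render-target textures and regenerating
// dirty mip levels), then binds state and issues each mesh, rebuilding the pipeline state
// whenever the primitive type changes.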
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT) {
            fGpu->resolveRenderTargetNoFlush(texRT);
        }

        // Check if we need to regenerate any mip maps
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            if (vkTexture->texturePriv().mipMapsAreDirty()) {
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }
        cbInfo.fSampledTextures.push_back(vkTexture);
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledTextures.push_back(sk_ref_sp(static_cast<GrVkTexture*>(dstTexture)));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

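// The GrMesh helpers below issue the actual draws: they bind geometry and record draw /
// drawIndexed calls on the current secondary command buffer.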
void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    SkASSERT(!indexBuffer->isCpuBuffer());
    auto gpuIndexBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(gpuIndexBuffer, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

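// Hands the current secondary command buffer to a client SkDrawable via GrVkDrawableInfo. Cached
// command buffer state is treated as invalid afterward, and the drawable is handed off to
// fGpu->addDrawable() so it outlives the recording.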
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif // SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}