/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkGpuCommandBuffer.h"

#include "include/core/SkDrawable.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrBackendDrawableInfo.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkResourceProvider.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkTexture.h"

GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;

namespace {

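// Deferred task that performs a texture upload via the flush state. It is recorded here and
// executed on the primary command buffer during submit(), before the secondary command buffer
// for the render pass is replayed.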
class InlineUpload : public GrVkPrimaryCommandBufferTask {
public:
    InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
            : fFlushState(state), fUpload(upload) {}

    void execute(const Args& args) override { fFlushState->doUpload(fUpload); }

private:
    GrOpFlushState* fFlushState;
    GrDeferredTextureUploadFn fUpload;
};

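// Deferred task that copies from a source surface into the command buffer's target surface.
// The source is held as a pending-read IO resource so it stays alive until the copy executes.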
class Copy : public GrVkPrimaryCommandBufferTask {
public:
    Copy(GrSurface* src, const SkIRect& srcRect, const SkIPoint& dstPoint, bool shouldDiscardDst)
            : fSrc(src)
            , fSrcRect(srcRect)
            , fDstPoint(dstPoint)
            , fShouldDiscardDst(shouldDiscardDst) {}

    void execute(const Args& args) override {
        args.fGpu->copySurface(args.fSurface, fSrc.get(), fSrcRect, fDstPoint, fShouldDiscardDst);
    }

private:
    using Src = GrPendingIOResource<GrSurface, kRead_GrIOType>;
    Src fSrc;
    SkIRect fSrcRect;
    SkIPoint fDstPoint;
    bool fShouldDiscardDst;
};

}  // anonymous namespace

/////////////////////////////////////////////////////////////////////////////

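// Records a copy task into fTasks; the actual copy happens when submit() replays the tasks on
// the primary command buffer.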
void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    SkASSERT(!src->isProtected() || (fTexture->isProtected() && fGpu->protectedContext()));
    fTasks.emplace<Copy>(src, srcRect, dstPoint, false);
}

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

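// Replays every recorded task (currently only copies) against the texture.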
void GrVkGpuTextureCommandBuffer::submit() {
    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fTexture};
    for (auto& task : fTasks) {
        task.execute(taskArgs);
    }
}

////////////////////////////////////////////////////////////////////////////////

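// Translates GrLoadOp/GrStoreOp values into the matching VkAttachmentLoadOp/VkAttachmentStoreOp.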
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu) : fGpu(gpu) {}

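// Sets up the first CommandBufferInfo for this render target: gets a compatible render pass
// with the requested load/store ops from the resource provider, records the clear color and
// initial load/store state, and begins a new secondary command buffer.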
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

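// Variant of init() for render targets that wrap an externally provided secondary command
// buffer: the external render pass and command buffer are adopted rather than created here.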
void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffer.reset(
            GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

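// Replays every recorded CommandBufferInfo onto the primary command buffer: pre-command tasks
// (copies and inline uploads) run first, then the color, stencil, and sampled images are
// transitioned to the layouts the render pass needs, and finally each non-empty secondary
// command buffer is submitted inside its render pass.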
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard
        // will get reordered with the rest of the draw commands and we can remove the discard
        // check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not
            // using the render pass to do a clear, so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should have only been one secondary command buffer in the wrapped case, so it
            // is safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a
        // discard call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also, since the
            // draws will be submitted in different render passes, we need to guard against
            // write-after-write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout.
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(std::move(cbInfo.fCommandBuffer),
                                               cbInfo.fRenderPass, &cbInfo.fColorClearValue,
                                               vkRT, fOrigin, iBounds);
        }
    }
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}

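// Binds this command buffer to a render target and captures the requested color/stencil
// load-and-store behavior before creating the first render pass via init(), or initWrapped()
// when the target wraps an external secondary command buffer.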
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

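// Recycles the secondary command buffers, unrefs the render passes, and clears all per-pass
// bookkeeping so the object can be reused for another render target.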
void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        if (cbInfo.fCommandBuffer) {
            cbInfo.fCommandBuffer.release()->recycle(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();
    fPreCommandBufferTasks.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;

#ifdef SK_DEBUG
    fIsActive = false;
#endif
}

bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

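// Clears the stencil clip bit inside the current render pass with vkCmdClearAttachments,
// restricted to the scissor rect when one is enabled.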
void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // This should only be called internally when we know we have a stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

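// Clears the color attachment. If nothing has been recorded yet and the clear is full-screen,
// the render pass is swapped for one whose color load op is VK_ATTACHMENT_LOAD_OP_CLEAR;
// otherwise the clear is issued with vkCmdClearAttachments inside the current render pass.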
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

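// Ends the current secondary command buffer and starts a new CommandBufferInfo whose render
// pass loads and stores both color and stencil, so earlier results are preserved.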
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

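// Records a deferred texture upload to run on the primary command buffer before this render
// pass. If draws have already been recorded in the current pass, a new render pass is started
// first so the upload lands between passes.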
void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

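// Records a deferred surface copy into the render target. A new render pass is started if
// needed, and the pass is switched to load-and-store so the copied contents are not discarded.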
void GrVkGpuRTCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<Copy>(
            src, srcRect, dstPoint, LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        SkASSERT(!src->isProtected() || (fRenderTarget->isProtected() && fGpu->protectedContext()));
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    }
}

////////////////////////////////////////////////////////////////////////////////

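// Binds the vertex, instance, and index buffers for the upcoming draw. Binding slots must match
// the 0-based order used when the GrVkPipeline was created: vertices first, then instances.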
void GrVkGpuRTCommandBuffer::bindGeometry(const GrGpuBuffer* indexBuffer,
                                          const GrGpuBuffer* vertexBuffer,
                                          const GrGpuBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to put any memory barriers to make sure host writes have finished here.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }

    if (indexBuffer) {
        SkASSERT(!indexBuffer->isMapped());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

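// Finds or creates a GrVkPipelineState compatible with the current render pass, binds the
// pipeline plus its uniforms and (when they are not per-mesh) its textures, and sets the
// dynamic viewport, scissor, and blend-constant state.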
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

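// Records the draws for a set of meshes: resolves and prepares every sampled texture, sets up
// (and, when the primitive type changes, re-creates) the pipeline state, applies per-mesh
// dynamic scissor and texture bindings, and then sends each mesh to the GPU.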
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT && texRT->needsResolve()) {
            fGpu->resolveRenderTargetNoFlush(texRT);
            // TEMPORARY: MSAA resolve will have dirtied mipmaps. This goes away once we switch
            // to resolving MSAA from the opList as well.
            if (GrSamplerState::Filter::kMipMap == filter &&
                (vkTexture->width() != 1 || vkTexture->height() != 1)) {
                SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
                SkASSERT(vkTexture->texturePriv().mipMapsAreDirty());
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }

        // Ensure mip maps were all resolved ahead of time by the opList.
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            SkASSERT(!vkTexture->texturePriv().mipMapsAreDirty());
        }
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
                this->appendSampledTexture(texture);
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
            this->appendSampledTexture(texture);
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
            this->appendSampledTexture(sampler.peekTexture());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        this->appendSampledTexture(dstTexture);
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

void GrVkGpuRTCommandBuffer::appendSampledTexture(GrTexture* tex) {
    SkASSERT(!tex->isProtected() || (fRenderTarget->isProtected() && fGpu->protectedContext()));
    fCommandBufferInfos[fCurrentCmdInfo].fSampledTextures.push_back(static_cast<GrVkTexture*>(tex));
}

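// Mesh "send to GPU" callbacks, invoked from onDraw() via mesh.sendToGpu(this): bind the
// geometry buffers and record the (indexed) instanced draw into the current secondary command
// buffer.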
void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    SkASSERT(!indexBuffer->isCpuBuffer());
    auto gpuIndexBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(gpuIndexBuffer, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

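// Hands the current secondary command buffer to a client-supplied SkDrawable so it can record
// Vulkan commands directly; cached command buffer state is invalidated since the drawable may
// change it, and the pass is marked non-empty.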
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif  // SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}