/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkGpuCommandBuffer.h"

#include "include/core/SkDrawable.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrBackendDrawableInfo.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkResourceProvider.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkTexture.h"

GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;

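// The GrVkPrimaryCommandBufferTask subclasses below capture work (inline uploads, surface
// copies, and transfer-out reads) that has to be recorded on the primary command buffer,
// outside of any render pass. The tasks are queued up while ops are being recorded and are
// replayed in submit(), before the corresponding secondary command buffer is submitted.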
namespace {

class InlineUpload : public GrVkPrimaryCommandBufferTask {
public:
    InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
            : fFlushState(state), fUpload(upload) {}

    void execute(const Args& args) override { fFlushState->doUpload(fUpload); }

private:
    GrOpFlushState* fFlushState;
    GrDeferredTextureUploadFn fUpload;
};

class Copy : public GrVkPrimaryCommandBufferTask {
public:
    Copy(GrSurface* src, const SkIRect& srcRect, const SkIPoint& dstPoint, bool shouldDiscardDst)
            : fSrc(src)
            , fSrcRect(srcRect)
            , fDstPoint(dstPoint)
            , fShouldDiscardDst(shouldDiscardDst) {}

    void execute(const Args& args) override {
        args.fGpu->copySurface(args.fSurface, fSrc.get(), fSrcRect, fDstPoint, fShouldDiscardDst);
    }

private:
    using Src = GrPendingIOResource<GrSurface, kRead_GrIOType>;
    Src fSrc;
    SkIRect fSrcRect;
    SkIPoint fDstPoint;
    bool fShouldDiscardDst;
};

class TransferFrom : public GrVkPrimaryCommandBufferTask {
public:
    TransferFrom(const SkIRect& srcRect, GrColorType surfaceColorType, GrColorType bufferColorType,
                 GrGpuBuffer* transferBuffer, size_t offset)
            : fTransferBuffer(sk_ref_sp(transferBuffer))
            , fOffset(offset)
            , fSrcRect(srcRect)
            , fSurfaceColorType(surfaceColorType)
            , fBufferColorType(bufferColorType) {}

    void execute(const Args& args) override {
        args.fGpu->transferPixelsFrom(args.fSurface, fSrcRect.fLeft, fSrcRect.fTop,
                                      fSrcRect.width(), fSrcRect.height(), fSurfaceColorType,
                                      fBufferColorType, fTransferBuffer.get(), fOffset);
    }

private:
    sk_sp<GrGpuBuffer> fTransferBuffer;
    size_t fOffset;
    SkIRect fSrcRect;
    GrColorType fSurfaceColorType;
    GrColorType fBufferColorType;
};

}  // anonymous namespace

/////////////////////////////////////////////////////////////////////////////

void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    SkASSERT(!src->isProtected() || (fTexture->isProtected() && fGpu->protectedContext()));
    fTasks.emplace<Copy>(src, srcRect, dstPoint, false);
}

void GrVkGpuTextureCommandBuffer::transferFrom(const SkIRect& srcRect, GrColorType surfaceColorType,
                                               GrColorType bufferColorType,
                                               GrGpuBuffer* transferBuffer, size_t offset) {
    fTasks.emplace<TransferFrom>(srcRect, surfaceColorType, bufferColorType, transferBuffer,
                                 offset);
}

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuTextureCommandBuffer::submit() {
    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fTexture};
    for (auto& task : fTasks) {
        task.execute(taskArgs);
    }
}

////////////////////////////////////////////////////////////////////////////////

void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu) : fGpu(gpu) {}

void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
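
// initWrapped() handles the case where the render target wraps an externally provided
// secondary command buffer and render pass: instead of creating our own secondary command
// buffer, we record into the client's buffer and reuse its external render pass.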

void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffer.reset(
            GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

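// submit() replays everything recorded so far onto the primary command buffer: for each
// CommandBufferInfo it first executes the queued pre-command-buffer tasks, then transitions
// the color, stencil, and sampled-texture image layouts, and finally submits the secondary
// command buffer inside its render pass (skipping buffers that recorded no work).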
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard
        // will get reordered with the rest of the draw commands and we can remove the discard
        // check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear, so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should only have been one secondary command buffer in the wrapped case, so it
            // is safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a
        // discard call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also, since the
            // draws will be submitted in different render passes, we need to guard against
            // write-after-write issues.

            // Change the layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout.
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(std::move(cbInfo.fCommandBuffer), cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}

void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        if (cbInfo.fCommandBuffer) {
            cbInfo.fCommandBuffer.release()->recycle(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();
    fPreCommandBufferTasks.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;

#ifdef SK_DEBUG
    fIsActive = false;
#endif
}

bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // This should only be called internally when we know we have a stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0;  // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

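// A full-target clear on a still-empty command buffer is folded into the render pass itself by
// switching its color attachment to a VK_ATTACHMENT_LOAD_OP_CLEAR load op; otherwise the clear
// is recorded with a clearAttachments call inside the current render pass.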
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // The parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

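// Work that must run on the primary command buffer (inline uploads, copies, transfers) cannot
// be interleaved with an in-progress render pass. addAdditionalRenderPass() ends the current
// secondary command buffer and starts a new one whose render pass loads and stores the
// attachment, so previously rendered content is preserved across the split.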
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

void GrVkGpuRTCommandBuffer::copy(GrSurface* src, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<Copy>(
            src, srcRect, dstPoint, LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        SkASSERT(!src->isProtected() || (fRenderTarget->isProtected() && fGpu->protectedContext()));
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    }
}

void GrVkGpuRTCommandBuffer::transferFrom(const SkIRect& srcRect, GrColorType surfaceColorType,
                                          GrColorType bufferColorType, GrGpuBuffer* transferBuffer,
                                          size_t offset) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty) {
        this->addAdditionalRenderPass();
    }
    fPreCommandBufferTasks.emplace<TransferFrom>(srcRect, surfaceColorType, bufferColorType,
                                                 transferBuffer, offset);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

////////////////////////////////////////////////////////////////////////////////

void GrVkGpuRTCommandBuffer::bindGeometry(const GrGpuBuffer* indexBuffer,
                                          const GrGpuBuffer* vertexBuffer,
                                          const GrGpuBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to put any memory barriers here to make sure host writes have finished.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }
    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

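// prepareDrawState() looks up (or creates) a cached GrVkPipelineState compatible with the
// current render pass, binds the pipeline along with its uniforms and, when they do not vary
// per mesh, its textures, and sets the dynamic scissor, viewport, and blend-constant state.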
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
            fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                         pipeline,
                                                                         primProc,
                                                                         primProcProxies,
                                                                         primitiveType,
                                                                         compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

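// onDraw() first makes sure every texture sampled by the primitive processor and fragment
// processors is resolved, has clean mip levels, and is tracked for a layout transition at
// submit time; it then binds a pipeline state per primitive type and issues each GrMesh.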
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT && texRT->needsResolve()) {
            fGpu->resolveRenderTargetNoFlush(texRT);
            // TEMPORARY: MSAA resolve will have dirtied mipmaps. This goes away once we switch
            // to resolving MSAA from the opList as well.
            if (GrSamplerState::Filter::kMipMap == filter &&
                (vkTexture->width() != 1 || vkTexture->height() != 1)) {
                SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
                SkASSERT(vkTexture->texturePriv().mipMapsAreDirty());
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }

        // Ensure mip maps were all resolved ahead of time by the opList.
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            SkASSERT(!vkTexture->texturePriv().mipMapsAreDirty());
        }
        cbInfo.fSampledTextures.push_back(vkTexture);

        SkASSERT(!texture->isProtected() ||
                 (fRenderTarget->isProtected() && fGpu->protectedContext()));
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledTextures.push_back(sk_ref_sp(static_cast<GrVkTexture*>(dstTexture)));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    SkASSERT(!indexBuffer->isCpuBuffer());
    auto gpuIndexBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(gpuIndexBuffer, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

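// executeDrawable() lets a client-supplied SkDrawable::GpuDrawHandler record its own Vulkan
// commands directly into the current secondary command buffer. We hand it the command buffer,
// a compatible render pass, and the target's format via GrVkDrawableInfo, and afterwards treat
// our cached command buffer state as invalid.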
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif  // SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}