blob: a42bef402844b6e5cde469270d703f4e5c3efb9e [file] [log] [blame]
egdaniel066df7c2016-06-08 14:02:27 -07001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Greg Daniel2d41d0d2019-08-26 11:08:51 -04008#include "src/gpu/vk/GrVkOpsRenderPass.h"
egdaniel066df7c2016-06-08 14:02:27 -07009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkDrawable.h"
11#include "include/core/SkRect.h"
12#include "include/gpu/GrBackendDrawableInfo.h"
13#include "src/gpu/GrContextPriv.h"
14#include "src/gpu/GrFixedClip.h"
15#include "src/gpu/GrMesh.h"
16#include "src/gpu/GrOpFlushState.h"
17#include "src/gpu/GrPipeline.h"
18#include "src/gpu/GrRenderTargetPriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050019#include "src/gpu/vk/GrVkCommandBuffer.h"
20#include "src/gpu/vk/GrVkCommandPool.h"
21#include "src/gpu/vk/GrVkGpu.h"
22#include "src/gpu/vk/GrVkPipeline.h"
23#include "src/gpu/vk/GrVkRenderPass.h"
24#include "src/gpu/vk/GrVkRenderTarget.h"
25#include "src/gpu/vk/GrVkResourceProvider.h"
26#include "src/gpu/vk/GrVkSemaphore.h"
27#include "src/gpu/vk/GrVkTexture.h"
egdaniel066df7c2016-06-08 14:02:27 -070028
// Out-of-line defaulted special members for the pre-command-buffer task base
// class; concrete tasks (e.g. InlineUpload below) override execute().
GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;
31
32namespace {
33
34class InlineUpload : public GrVkPrimaryCommandBufferTask {
35public:
36 InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
37 : fFlushState(state), fUpload(upload) {}
Brian Salomon5d8f1cc2019-04-24 09:03:53 -040038
Greg Danielb20d7e52019-09-03 13:54:39 -040039 void execute(const Args& args) override { fFlushState->doUpload(fUpload, true); }
Brian Salomon5d8f1cc2019-04-24 09:03:53 -040040
41private:
42 GrOpFlushState* fFlushState;
43 GrDeferredTextureUploadFn fUpload;
44};
45
Brian Salomon5d8f1cc2019-04-24 09:03:53 -040046} // anonymous namespace
47
48/////////////////////////////////////////////////////////////////////////////
49
Robert Phillips6b47c7d2017-08-29 07:24:09 -040050void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
egdaniel066df7c2016-06-08 14:02:27 -070051 VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
Robert Phillips95214472017-08-08 18:00:03 -040052 switch (loadOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040053 case GrLoadOp::kLoad:
egdaniel066df7c2016-06-08 14:02:27 -070054 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel066df7c2016-06-08 14:02:27 -070055 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040056 case GrLoadOp::kClear:
egdaniel9cb63402016-06-23 08:37:05 -070057 *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
58 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040059 case GrLoadOp::kDiscard:
egdaniel9cb63402016-06-23 08:37:05 -070060 *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
61 break;
62 default:
63 SK_ABORT("Invalid LoadOp");
egdaniel066df7c2016-06-08 14:02:27 -070064 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel9cb63402016-06-23 08:37:05 -070065 }
66
Robert Phillips95214472017-08-08 18:00:03 -040067 switch (storeOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040068 case GrStoreOp::kStore:
egdaniel066df7c2016-06-08 14:02:27 -070069 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
70 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040071 case GrStoreOp::kDiscard:
egdaniel066df7c2016-06-08 14:02:27 -070072 *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
73 break;
brianosman0bbc3712016-06-14 04:53:09 -070074 default:
egdaniel9cb63402016-06-23 08:37:05 -070075 SK_ABORT("Invalid StoreOp");
brianosman0bbc3712016-06-14 04:53:09 -070076 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
egdaniel066df7c2016-06-08 14:02:27 -070077 }
78}
79
Greg Daniel2d41d0d2019-08-26 11:08:51 -040080GrVkOpsRenderPass::GrVkOpsRenderPass(GrVkGpu* gpu) : fGpu(gpu) {}
Brian Salomonc293a292016-11-30 13:38:32 -050081
Greg Daniel2d41d0d2019-08-26 11:08:51 -040082void GrVkOpsRenderPass::init() {
Brian Salomonc293a292016-11-30 13:38:32 -050083 GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
84 GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);
egdaniel9cb63402016-06-23 08:37:05 -070085
Greg Daniel36a77ee2016-10-18 10:33:25 -040086 CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
Brian Salomonc293a292016-11-30 13:38:32 -050087 SkASSERT(fCommandBufferInfos.count() == 1);
Greg Daniel22bc8652017-03-22 15:45:43 -040088 fCurrentCmdInfo = 0;
Greg Daniel36a77ee2016-10-18 10:33:25 -040089
Robert Phillips19e51dc2017-08-09 09:30:51 -040090 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
91 const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
egdaniel066df7c2016-06-08 14:02:27 -070092 if (rpHandle.isValid()) {
Greg Daniel36a77ee2016-10-18 10:33:25 -040093 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
94 vkColorOps,
95 vkStencilOps);
egdaniel066df7c2016-06-08 14:02:27 -070096 } else {
Robert Phillips19e51dc2017-08-09 09:30:51 -040097 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
Greg Daniel36a77ee2016-10-18 10:33:25 -040098 vkColorOps,
99 vkStencilOps);
egdaniel066df7c2016-06-08 14:02:27 -0700100 }
101
Brian Osmancb3d0872018-10-16 15:19:28 -0400102 cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
103 cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
104 cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
105 cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];
egdaniel9cb63402016-06-23 08:37:05 -0700106
Robert Phillips380b90c2017-08-30 07:41:07 -0400107 if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
Greg Daniela41a74a2018-10-09 12:59:23 +0000108 cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
Robert Phillips380b90c2017-08-30 07:41:07 -0400109 } else {
110 cbInfo.fBounds.setEmpty();
111 }
Greg Daniela3c68df2018-03-16 13:46:53 -0400112
113 if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
114 cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
115 } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
116 VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
117 cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
118 } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
119 cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
120 }
Greg Daniel36a77ee2016-10-18 10:33:25 -0400121
Greg Daniel228518f2019-08-07 16:55:17 -0400122 cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
Robert Phillips19e51dc2017-08-09 09:30:51 -0400123 cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
egdaniel066df7c2016-06-08 14:02:27 -0700124}
125
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400126void GrVkOpsRenderPass::initWrapped() {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500127 CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
128 SkASSERT(fCommandBufferInfos.count() == 1);
129 fCurrentCmdInfo = 0;
130
131 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
132 SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
133 cbInfo.fRenderPass = vkRT->externalRenderPass();
134 cbInfo.fRenderPass->ref();
135
136 cbInfo.fBounds.setEmpty();
Greg Daniel228518f2019-08-07 16:55:17 -0400137 cbInfo.fCommandBuffer.reset(
Greg Daniel8daf3b72019-07-30 09:57:26 -0400138 GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
Greg Daniel070cbaf2019-01-03 17:35:54 -0500139 cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
140}
Brian Salomonc293a292016-11-30 13:38:32 -0500141
GrVkOpsRenderPass::~GrVkOpsRenderPass() {
    // reset() recycles outstanding secondary command buffers and unrefs the
    // render passes, so destruction releases all per-pass resources.
    this->reset();
}
145
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400146GrGpu* GrVkOpsRenderPass::gpu() { return fGpu; }
egdaniel9cb63402016-06-23 08:37:05 -0700147
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400148void GrVkOpsRenderPass::end() {
Greg Daniel22bc8652017-03-22 15:45:43 -0400149 if (fCurrentCmdInfo >= 0) {
150 fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
Brian Salomonc293a292016-11-30 13:38:32 -0500151 }
egdaniel066df7c2016-06-08 14:02:27 -0700152}
153
// Submits every recorded secondary command buffer to the GPU. For each
// CommandBufferInfo this: runs its deferred pre-command tasks (inline
// uploads), skips buffers with no work and no clear/discard, transitions the
// color (and stencil, if present) attachments into attachment-optimal
// layouts, and hands the buffer to the GrVkGpu clipped to the recorded dirty
// bounds. Wrapped external command buffers are never submitted from here.
void GrVkOpsRenderPass::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    // Draws resolve into the MSAA image when one exists; otherwise the target
    // itself is the color attachment.
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // fPreCommandBufferTasks holds the tasks for all buffers back to back;
    // each cbInfo consumes its own fNumPreCmds entries in order.
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        // Execute this buffer's pre-work (e.g. texture uploads) before any
        // layout transitions below, since pre-work may change layouts itself.
        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // There should have only been one secondary command buffer in the wrapped case so it is
            // safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        // Clip the dirty bounds to the target; an empty intersection means
        // nothing visible was touched and the buffer is dropped.
        if (cbInfo.fBounds.intersect(SkRect::MakeIWH(fRenderTarget->width(),
                                                     fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard against write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            // Ownership of the secondary command buffer moves to the gpu.
            fGpu->submitSecondaryCommandBuffer(std::move(cbInfo.fCommandBuffer), cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    // Every recorded pre-command task must have been consumed.
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}
239
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400240void GrVkOpsRenderPass::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
241 const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
Greg Danielb20d7e52019-09-03 13:54:39 -0400242 const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo,
243 const SkTArray<GrTextureProxy*, true>& sampledProxies) {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400244 SkASSERT(!fRenderTarget);
245 SkASSERT(fCommandBufferInfos.empty());
246 SkASSERT(-1 == fCurrentCmdInfo);
Robert Phillips9da87e02019-02-04 13:26:26 -0500247 SkASSERT(fGpu == rt->getContext()->priv().getGpu());
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400248 SkASSERT(!fLastPipelineState);
249
Greg Danielb0c7ad12019-06-06 17:23:35 +0000250#ifdef SK_DEBUG
251 fIsActive = true;
252#endif
253
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400254 this->INHERITED::set(rt, origin);
255
Greg Danielb20d7e52019-09-03 13:54:39 -0400256 for (int i = 0; i < sampledProxies.count(); ++i) {
257 if (sampledProxies[i]->isInstantiated()) {
258 GrVkTexture* vkTex = static_cast<GrVkTexture*>(sampledProxies[i]->peekTexture());
259 SkASSERT(vkTex);
260 vkTex->setImageLayout(
261 fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
262 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
263 }
264 }
265
Greg Daniel070cbaf2019-01-03 17:35:54 -0500266 if (this->wrapsSecondaryCommandBuffer()) {
267 this->initWrapped();
268 return;
269 }
270
Brian Osman9a9baae2018-11-05 15:06:26 -0500271 fClearColor = colorInfo.fClearColor;
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400272
273 get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
274 &fVkColorLoadOp, &fVkColorStoreOp);
275
276 get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
277 &fVkStencilLoadOp, &fVkStencilStoreOp);
278
279 this->init();
280}
281
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400282void GrVkOpsRenderPass::reset() {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400283 for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
284 CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
Greg Daniel228518f2019-08-07 16:55:17 -0400285 if (cbInfo.fCommandBuffer) {
286 cbInfo.fCommandBuffer.release()->recycle(fGpu);
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400287 }
288 cbInfo.fRenderPass->unref(fGpu);
289 }
290 fCommandBufferInfos.reset();
Brian Salomon24d377e2019-04-23 15:24:31 -0400291 fPreCommandBufferTasks.reset();
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400292
293 fCurrentCmdInfo = -1;
294
295 fLastPipelineState = nullptr;
296 fRenderTarget = nullptr;
Greg Danielb0c7ad12019-06-06 17:23:35 +0000297
298#ifdef SK_DEBUG
299 fIsActive = false;
300#endif
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400301}
302
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400303bool GrVkOpsRenderPass::wrapsSecondaryCommandBuffer() const {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500304 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
305 return vkRT->wrapsSecondaryCommandBuffer();
306}
307
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400308////////////////////////////////////////////////////////////////////////////////
309
// Intentionally a no-op: the debug marker message is dropped on Vulkan.
void GrVkOpsRenderPass::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}
313
// Clears the stencil attachment inside the active render pass using
// vkCmdClearAttachments. When insideStencilMask is true the most significant
// stencil bit is set; otherwise the stencil is cleared to zero. The clear
// region is the scissor rect (flipped for bottom-left origins) or the whole
// target when the scissor is disabled.
void GrVkOpsRenderPass::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        // Set only the top (clip) bit of the stencil value.
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary: Vulkan rects are top-left-origin, so a
    // bottom-left-origin target needs its scissor mirrored in y.
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    // The render pass must actually have a stencil attachment.
    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds (in the un-flipped coordinate space).
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}
373
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400374void GrVkOpsRenderPass::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
Jim Van Verth6a40abc2017-11-02 16:56:09 +0000375 // parent class should never let us get here with no RT
csmartdaltonbf4a8f92016-09-06 10:01:06 -0700376 SkASSERT(!clip.hasWindowRectangles());
egdaniel9cb63402016-06-23 08:37:05 -0700377
Greg Daniel22bc8652017-03-22 15:45:43 -0400378 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Greg Daniel36a77ee2016-10-18 10:33:25 -0400379
Brian Osman9a9baae2018-11-05 15:06:26 -0500380 VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};
egdaniel9cb63402016-06-23 08:37:05 -0700381
Greg Daniel674ee742019-08-27 13:12:33 -0400382 // If we end up in a situation where we are calling clear without a scissior then in general it
383 // means we missed an opportunity higher up the stack to set the load op to be a clear. However,
384 // there are situations where higher up we couldn't discard the previous ops and set a clear
385 // load op (e.g. if we needed to execute a wait op). Thus we also have the empty check here.
Greg Daniel4fe92572019-09-03 11:16:40 -0400386 // TODO: Make the waitOp a RenderTask instead so we can clear out the GrOpsTask for a clear. We
387 // can then reenable this assert assuming we can't get messed up by a waitOp.
388 //SkASSERT(!cbInfo.fIsEmpty || clip.scissorEnabled());
egdaniel9cb63402016-06-23 08:37:05 -0700389
390 // We always do a sub rect clear with clearAttachments since we are inside a render pass
391 VkClearRect clearRect;
392 // Flip rect if necessary
csmartdalton29df7602016-08-31 11:55:52 -0700393 SkIRect vkRect;
Brian Salomond818ebf2018-07-02 14:08:49 +0000394 if (!clip.scissorEnabled()) {
Greg Daniela41a74a2018-10-09 12:59:23 +0000395 vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
Robert Phillips4f101a72017-07-28 08:42:04 -0400396 } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
csmartdalton29df7602016-08-31 11:55:52 -0700397 vkRect = clip.scissorRect();
398 } else {
399 const SkIRect& scissor = clip.scissorRect();
Greg Daniel65a09272016-10-12 09:47:22 -0400400 vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
401 scissor.fRight, fRenderTarget->height() - scissor.fTop);
egdaniel9cb63402016-06-23 08:37:05 -0700402 }
403 clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
404 clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
405 clearRect.baseArrayLayer = 0;
406 clearRect.layerCount = 1;
407
408 uint32_t colorIndex;
Greg Daniel36a77ee2016-10-18 10:33:25 -0400409 SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));
egdaniel9cb63402016-06-23 08:37:05 -0700410
411 VkClearAttachment attachment;
412 attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
413 attachment.colorAttachment = colorIndex;
414 attachment.clearValue.color = vkColor;
415
Greg Daniel22bc8652017-03-22 15:45:43 -0400416 cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
Greg Daniel77b53f62016-10-18 11:48:51 -0400417 cbInfo.fIsEmpty = false;
Greg Daniel36a77ee2016-10-18 10:33:25 -0400418
419 // Update command buffer bounds
Brian Salomond818ebf2018-07-02 14:08:49 +0000420 if (!clip.scissorEnabled()) {
Greg Daniel36a77ee2016-10-18 10:33:25 -0400421 cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
422 } else {
423 cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
424 }
egdaniel9cb63402016-06-23 08:37:05 -0700425 return;
426}
427
Greg Daniel500d58b2017-08-24 15:59:33 -0400428////////////////////////////////////////////////////////////////////////////////
429
// Ends the current secondary command buffer and starts a new CommandBufferInfo
// whose render pass loads and stores both color and stencil, so work recorded
// so far is preserved across the split. Used by inlineUpload() to insert
// uploads between draw batches.
void GrVkOpsRenderPass::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    // Subsequent passes must not clobber earlier results: load and store
    // everything.
    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
464
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400465void GrVkOpsRenderPass::inlineUpload(GrOpFlushState* state,
Brian Salomon943ed792017-10-30 09:37:55 -0400466 GrDeferredTextureUploadFn& upload) {
Greg Daniel22bc8652017-03-22 15:45:43 -0400467 if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
468 this->addAdditionalRenderPass();
Greg Daniel77b53f62016-10-18 11:48:51 -0400469 }
Brian Salomon24d377e2019-04-23 15:24:31 -0400470
Brian Salomon24d377e2019-04-23 15:24:31 -0400471 fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
472 ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
Greg Daniel77b53f62016-10-18 11:48:51 -0400473}
474
egdaniel9cb63402016-06-23 08:37:05 -0700475////////////////////////////////////////////////////////////////////////////////
476
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400477void GrVkOpsRenderPass::bindGeometry(const GrGpuBuffer* indexBuffer,
Brian Salomondbf70722019-02-07 11:31:24 -0500478 const GrGpuBuffer* vertexBuffer,
479 const GrGpuBuffer* instanceBuffer) {
Chris Daltonff926502017-05-03 14:36:54 -0400480 GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
egdaniel9cb63402016-06-23 08:37:05 -0700481 // There is no need to put any memory barriers to make sure host writes have finished here.
482 // When a command buffer is submitted to a queue, there is an implicit memory barrier that
483 // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
484 // an active RenderPass.
egdaniel9cb63402016-06-23 08:37:05 -0700485
Chris Dalton1d616352017-05-31 12:51:23 -0600486 // Here our vertex and instance inputs need to match the same 0-based bindings they were
487 // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
488 uint32_t binding = 0;
489
Brian Salomon802cb312018-06-08 18:05:20 -0400490 if (vertexBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600491 SkASSERT(vertexBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600492 SkASSERT(!vertexBuffer->isMapped());
493
494 currCmdBuf->bindInputBuffer(fGpu, binding++,
495 static_cast<const GrVkVertexBuffer*>(vertexBuffer));
496 }
497
Brian Salomon802cb312018-06-08 18:05:20 -0400498 if (instanceBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600499 SkASSERT(instanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600500 SkASSERT(!instanceBuffer->isMapped());
501
502 currCmdBuf->bindInputBuffer(fGpu, binding++,
503 static_cast<const GrVkVertexBuffer*>(instanceBuffer));
504 }
Chris Daltonff926502017-05-03 14:36:54 -0400505 if (indexBuffer) {
506 SkASSERT(indexBuffer);
507 SkASSERT(!indexBuffer->isMapped());
egdaniel9cb63402016-06-23 08:37:05 -0700508
Chris Daltonff926502017-05-03 14:36:54 -0400509 currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
egdaniel9cb63402016-06-23 08:37:05 -0700510 }
511}
512
// Finds (or creates) a GrVkPipelineState compatible with the current render
// pass for the given processors, binds the pipeline and uniforms on the
// current secondary command buffer, binds textures when they do not vary per
// mesh, and sets the dynamic scissor/viewport/blend-constant state. Returns
// null when no pipeline state could be created.
GrVkPipelineState* GrVkOpsRenderPass::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    // Per-mesh dynamic textures take precedence over the fixed set.
    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    // Proxies must be supplied exactly when the processor samples textures.
    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    // With no scissor, the dynamic scissor is the full target; with a fixed
    // scissor it is set once here; per-mesh scissors are handled elsewhere.
    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}
576
Greg Danielb20d7e52019-09-03 13:54:39 -0400577#ifdef SK_DEBUG
578void check_sampled_texture(GrTexture* tex, GrRenderTarget* rt, GrVkGpu* gpu) {
579 SkASSERT(!tex->isProtected() || (rt->isProtected() && gpu->protectedContext()));
580 GrVkTexture* vkTex = static_cast<GrVkTexture*>(tex);
581 SkASSERT(vkTex->currentLayout() == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
582}
583#endif
584
585
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400586void GrVkOpsRenderPass::onDraw(const GrPrimitiveProcessor& primProc,
Brian Salomonff168d92018-06-23 15:17:27 -0400587 const GrPipeline& pipeline,
Brian Salomon49348902018-06-26 09:12:38 -0400588 const GrPipeline::FixedDynamicState* fixedDynamicState,
589 const GrPipeline::DynamicStateArrays* dynamicStateArrays,
Greg Daniel500d58b2017-08-24 15:59:33 -0400590 const GrMesh meshes[],
Greg Daniel500d58b2017-08-24 15:59:33 -0400591 int meshCount,
592 const SkRect& bounds) {
egdaniel9cb63402016-06-23 08:37:05 -0700593 if (!meshCount) {
594 return;
595 }
Greg Danielea022cd2018-03-16 11:10:03 -0400596
597 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
598
Greg Danielb20d7e52019-09-03 13:54:39 -0400599#ifdef SK_DEBUG
Brian Salomonf7232642018-09-19 08:58:08 -0400600 if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
601 for (int m = 0, i = 0; m < meshCount; ++m) {
602 for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
603 auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
Greg Danielb20d7e52019-09-03 13:54:39 -0400604 check_sampled_texture(texture, fRenderTarget, fGpu);
Brian Salomonf7232642018-09-19 08:58:08 -0400605 }
606 }
607 } else {
608 for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
609 auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
Greg Danielb20d7e52019-09-03 13:54:39 -0400610 check_sampled_texture(texture, fRenderTarget, fGpu);
Brian Salomonf7232642018-09-19 08:58:08 -0400611 }
Brian Salomone782f842018-07-31 13:53:11 -0400612 }
bsalomonb58a2b42016-09-26 06:55:02 -0700613 GrFragmentProcessor::Iter iter(pipeline);
614 while (const GrFragmentProcessor* fp = iter.next()) {
Brian Salomone782f842018-07-31 13:53:11 -0400615 for (int i = 0; i < fp->numTextureSamplers(); ++i) {
616 const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
Greg Danielb20d7e52019-09-03 13:54:39 -0400617 check_sampled_texture(sampler.peekTexture(), fRenderTarget, fGpu);
Brian Salomone782f842018-07-31 13:53:11 -0400618 }
egdaniel2f5792a2016-07-06 08:51:23 -0700619 }
Robert Phillipsbb581ce2017-05-29 15:05:15 -0400620 if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
Greg Danielb20d7e52019-09-03 13:54:39 -0400621 check_sampled_texture(dstTexture, fRenderTarget, fGpu);
Brian Salomon18dfa982017-04-03 16:57:43 -0400622 }
Greg Danielb20d7e52019-09-03 13:54:39 -0400623#endif
egdaniel2f5792a2016-07-06 08:51:23 -0700624
Chris Daltonbca46e22017-05-15 11:03:26 -0600625 GrPrimitiveType primitiveType = meshes[0].primitiveType();
Brian Salomon49348902018-06-26 09:12:38 -0400626 GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
627 dynamicStateArrays, primitiveType);
egdaniel9cb63402016-06-23 08:37:05 -0700628 if (!pipelineState) {
629 return;
630 }
631
Brian Salomond818ebf2018-07-02 14:08:49 +0000632 bool dynamicScissor =
633 pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
Brian Salomonf7232642018-09-19 08:58:08 -0400634 bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;
Brian Salomon49348902018-06-26 09:12:38 -0400635
egdaniel9cb63402016-06-23 08:37:05 -0700636 for (int i = 0; i < meshCount; ++i) {
637 const GrMesh& mesh = meshes[i];
Chris Daltonbca46e22017-05-15 11:03:26 -0600638 if (mesh.primitiveType() != primitiveType) {
Chris Dalton6f241802017-05-08 13:58:38 -0400639 SkDEBUGCODE(pipelineState = nullptr);
Chris Daltonbca46e22017-05-15 11:03:26 -0600640 primitiveType = mesh.primitiveType();
Brian Salomon49348902018-06-26 09:12:38 -0400641 pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
642 dynamicStateArrays, primitiveType);
Chris Dalton6f241802017-05-08 13:58:38 -0400643 if (!pipelineState) {
644 return;
egdaniel9cb63402016-06-23 08:37:05 -0700645 }
Chris Dalton6f241802017-05-08 13:58:38 -0400646 }
egdaniel9cb63402016-06-23 08:37:05 -0700647
Brian Salomon49348902018-06-26 09:12:38 -0400648 if (dynamicScissor) {
649 GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
Robert Phillipsd0fe8752019-01-31 14:13:59 -0500650 fOrigin,
Brian Salomon49348902018-06-26 09:12:38 -0400651 dynamicStateArrays->fScissorRects[i]);
Chris Dalton46983b72017-06-06 12:27:16 -0600652 }
Brian Salomonf7232642018-09-19 08:58:08 -0400653 if (dynamicTextures) {
654 GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
655 primProc.numTextureSamplers() * i;
656 pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
657 cbInfo.currentCmdBuf());
658 }
Chris Daltonbca46e22017-05-15 11:03:26 -0600659 SkASSERT(pipelineState);
Brian Salomon802cb312018-06-08 18:05:20 -0400660 mesh.sendToGpu(this);
egdaniel9cb63402016-06-23 08:37:05 -0700661 }
662
Greg Daniel36a77ee2016-10-18 10:33:25 -0400663 cbInfo.fBounds.join(bounds);
Chris Dalton114a3c02017-05-26 15:17:19 -0600664 cbInfo.fIsEmpty = false;
egdaniel066df7c2016-06-08 14:02:27 -0700665}
666
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400667void GrVkOpsRenderPass::sendInstancedMeshToGpu(GrPrimitiveType,
668 const GrBuffer* vertexBuffer,
669 int vertexCount,
670 int baseVertex,
671 const GrBuffer* instanceBuffer,
672 int instanceCount,
673 int baseInstance) {
Chris Dalton114a3c02017-05-26 15:17:19 -0600674 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500675 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
676 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
677 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
678 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
679 this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600680 cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600681 fGpu->stats()->incNumDraws();
682}
683
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400684void GrVkOpsRenderPass::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
685 const GrBuffer* indexBuffer,
686 int indexCount,
687 int baseIndex,
688 const GrBuffer* vertexBuffer,
689 int baseVertex,
690 const GrBuffer* instanceBuffer,
691 int instanceCount,
692 int baseInstance,
693 GrPrimitiveRestart restart) {
Brian Salomon802cb312018-06-08 18:05:20 -0400694 SkASSERT(restart == GrPrimitiveRestart::kNo);
Chris Dalton114a3c02017-05-26 15:17:19 -0600695 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500696 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
697 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
698 SkASSERT(!indexBuffer->isCpuBuffer());
699 auto gpuIndexxBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
700 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
701 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
702 this->bindGeometry(gpuIndexxBuffer, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600703 cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
704 baseIndex, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600705 fGpu->stats()->incNumDraws();
706}
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400707
708////////////////////////////////////////////////////////////////////////////////
709
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400710void GrVkOpsRenderPass::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400711 GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);
712
713 GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;
714
715 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
716 VkRect2D bounds;
717 bounds.offset = { 0, 0 };
718 bounds.extent = { 0, 0 };
719
720 GrVkDrawableInfo vkInfo;
721 vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
722 vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
Greg Danielb353eeb2018-12-05 11:01:58 -0500723 SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400724 vkInfo.fFormat = targetImage->imageFormat();
725 vkInfo.fDrawBounds = &bounds;
Stan Ilievcb580602019-02-26 11:36:07 -0500726#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
727 vkInfo.fImage = targetImage->image();
728#else
729 vkInfo.fImage = VK_NULL_HANDLE;
730#endif //SK_BUILD_FOR_ANDROID_FRAMEWORK
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400731
732 GrBackendDrawableInfo info(vkInfo);
733
Eric Karlc0b2ba22019-01-22 19:40:35 -0800734 // After we draw into the command buffer via the drawable, cached state we have may be invalid.
735 cbInfo.currentCmdBuf()->invalidateState();
Eric Karla8878a12019-02-07 18:17:43 -0800736 // Also assume that the drawable produced output.
737 cbInfo.fIsEmpty = false;
Eric Karlc0b2ba22019-01-22 19:40:35 -0800738
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400739 drawable->draw(info);
740 fGpu->addDrawable(std::move(drawable));
741
742 if (bounds.extent.width == 0 || bounds.extent.height == 0) {
743 cbInfo.fBounds.join(target->getBoundsRect());
744 } else {
745 cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
746 bounds.extent.width, bounds.extent.height));
747 }
748}
749