/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkOpsRenderPass.h"

#include "include/core/SkDrawable.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrBackendDrawableInfo.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkResourceProvider.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkTexture.h"

/////////////////////////////////////////////////////////////////////////////

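// Translates the high-level GrLoadOp/GrStoreOp pair into the Vulkan attachment load/store ops
// used when looking up a compatible GrVkRenderPass.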
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkOpsRenderPass::GrVkOpsRenderPass(GrVkGpu* gpu) : fGpu(gpu) {}

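// Sets up a fresh render pass instance over the target: converts the color and stencil
// load/store ops, transitions the color (or MSAA) image and any stencil attachment into
// attachment-optimal layouts, looks up a compatible GrVkRenderPass from the resource provider,
// and begins the pass, recording into a secondary command buffer unless the caps prefer primary
// command buffers.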
void GrVkOpsRenderPass::init(const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
                             const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo,
                             const SkPMColor4f& clearColor) {
    VkAttachmentLoadOp loadOp;
    VkAttachmentStoreOp storeOp;
    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &loadOp, &storeOp);
    GrVkRenderPass::LoadStoreOps vkColorOps(loadOp, storeOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &loadOp, &storeOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(loadOp, storeOp);

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;

    // Change layout of our render target so it can be used as the color attachment.
    // TODO: If we know that we will never be blending or loading the attachment we could drop the
    // VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
    targetImage->setImageLayout(fGpu,
                                VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                false);

    // If we are using a stencil attachment we also need to update its layout
    if (GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment()) {
        GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
        // We need the write and read access bits since we may load and store the stencil.
        // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
        // wait there.
        vkStencil->setImageLayout(fGpu,
                                  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                  VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                  VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                  VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                  false);
    }

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        fCurrentRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        fCurrentRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    SkASSERT(fCurrentRenderPass);

    VkClearValue vkClearColor;
    vkClearColor.color.float32[0] = clearColor[0];
    vkClearColor.color.float32[1] = clearColor[1];
    vkClearColor.color.float32[2] = clearColor[2];
    vkClearColor.color.float32[3] = clearColor[3];

    if (!fGpu->vkCaps().preferPrimaryOverSecondaryCommandBuffers()) {
        fCurrentSecondaryCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
        fCurrentSecondaryCommandBuffer->begin(fGpu, vkRT->framebuffer(), fCurrentRenderPass);
    }

    fGpu->beginRenderPass(fCurrentRenderPass, &vkClearColor, vkRT, fOrigin, fBounds,
                          SkToBool(fCurrentSecondaryCommandBuffer));
}

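// Used when the render target wraps an externally provided secondary command buffer: we reuse
// the client's external render pass and record directly into the client's secondary command
// buffer instead of creating our own.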
void GrVkOpsRenderPass::initWrapped() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    fCurrentRenderPass = vkRT->externalRenderPass();
    SkASSERT(fCurrentRenderPass);
    fCurrentRenderPass->ref();

    fCurrentSecondaryCommandBuffer.reset(
            GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
    fCurrentSecondaryCommandBuffer->begin(fGpu, nullptr, fCurrentRenderPass);
}

GrVkOpsRenderPass::~GrVkOpsRenderPass() {
    this->reset();
}

GrGpu* GrVkOpsRenderPass::gpu() { return fGpu; }

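// Commands are recorded into our secondary command buffer when one is in use; otherwise they go
// straight into the GrVkGpu's current primary command buffer.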
GrVkCommandBuffer* GrVkOpsRenderPass::currentCommandBuffer() {
    if (fCurrentSecondaryCommandBuffer) {
        return fCurrentSecondaryCommandBuffer.get();
    }
    return fGpu->currentCommandBuffer();
}

void GrVkOpsRenderPass::end() {
    if (fCurrentSecondaryCommandBuffer) {
        fCurrentSecondaryCommandBuffer->end(fGpu);
    }
}

void GrVkOpsRenderPass::submit() {
    if (!fRenderTarget) {
        return;
    }

    // We don't want to actually submit the secondary command buffer if it is wrapped.
    if (this->wrapsSecondaryCommandBuffer()) {
        return;
    }

    if (fCurrentSecondaryCommandBuffer) {
        fGpu->submitSecondaryCommandBuffer(std::move(fCurrentSecondaryCommandBuffer));
    }
    fGpu->endRenderPass(fRenderTarget, fOrigin, fBounds);
}

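// Configures this ops render pass for a flush. Roughly: set() records the target, moves any
// sampled textures to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, and starts the render pass (or
// the wrapped path); end() closes the secondary command buffer; submit() hands the recorded work
// to the GrVkGpu; reset() releases the render pass and command buffer so the object can be
// reused. The exact call sequence is driven by the caller.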
void GrVkOpsRenderPass::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                            const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
                            const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo,
                            const SkTArray<GrTextureProxy*, true>& sampledProxies) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    for (int i = 0; i < sampledProxies.count(); ++i) {
        if (sampledProxies[i]->isInstantiated()) {
            GrVkTexture* vkTex = static_cast<GrVkTexture*>(sampledProxies[i]->peekTexture());
            SkASSERT(vkTex);
            vkTex->setImageLayout(
                    fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
        }
    }

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    // TODO: This should be passed in via the GrOpsTask instead of always setting it to the full
    // render target bounds.
    fBounds = SkIRect::MakeWH(fRenderTarget->width(), fRenderTarget->height());

    this->init(colorInfo, stencilInfo, colorInfo.fClearColor);
}

void GrVkOpsRenderPass::reset() {
    if (fCurrentSecondaryCommandBuffer) {
        fCurrentSecondaryCommandBuffer.release()->recycle(fGpu);
    }
    if (fCurrentRenderPass) {
        fCurrentRenderPass->unref(fGpu);
        fCurrentRenderPass = nullptr;
    }
    fCurrentCBIsEmpty = true;

    fRenderTarget = nullptr;

#ifdef SK_DEBUG
    fIsActive = false;
#endif
}

bool GrVkOpsRenderPass::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

void GrVkOpsRenderPass::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

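// Clears the stencil clip bit. Since a render pass is active this is done with clearAttachments
// (vkCmdClearAttachments) rather than a clear load op; the clear value sets the high stencil bit
// when we are inside the stencil mask and zero otherwise.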
void GrVkOpsRenderPass::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(fCurrentRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    this->currentCommandBuffer()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    fCurrentCBIsEmpty = false;
}

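// Clears the color attachment, again via clearAttachments since a render pass is active.
// Unscissored clears should normally have been folded into a clear load op further up the stack;
// see the note about the disabled assert below.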
void GrVkOpsRenderPass::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    // If we end up in a situation where we are calling clear without a scissor then in general it
    // means we missed an opportunity higher up the stack to set the load op to be a clear. However,
    // there are situations where higher up we couldn't discard the previous ops and set a clear
    // load op (e.g. if we needed to execute a wait op). Thus we also have the empty check here.
    // TODO: Make the waitOp a RenderTask instead so we can clear out the GrOpsTask for a clear. We
    // can then reenable this assert assuming we can't get messed up by a waitOp.
    //SkASSERT(!fCurrentCBIsEmpty || clip.scissorEnabled());

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(fCurrentRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    this->currentCommandBuffer()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    fCurrentCBIsEmpty = false;
    return;
}

////////////////////////////////////////////////////////////////////////////////

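// Begins a follow-up render pass over the same target (e.g. after an inline upload or before an
// external drawable). Load ops are forced to LOAD so the attachment contents from the previous
// pass are preserved; the caller is expected to have ended the previous pass already.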
void GrVkOpsRenderPass::addAdditionalRenderPass(bool mustUseSecondaryCommandBuffer) {
    SkASSERT(!this->wrapsSecondaryCommandBuffer());
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    SkASSERT(fCurrentRenderPass);
    fCurrentRenderPass->unref(fGpu);
    if (rpHandle.isValid()) {
        fCurrentRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        fCurrentRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    SkASSERT(fCurrentRenderPass);

    VkClearValue vkClearColor;
    memset(&vkClearColor, 0, sizeof(VkClearValue));

    if (!fGpu->vkCaps().preferPrimaryOverSecondaryCommandBuffers() ||
        mustUseSecondaryCommandBuffer) {
        fCurrentSecondaryCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
        fCurrentSecondaryCommandBuffer->begin(fGpu, vkRT->framebuffer(), fCurrentRenderPass);
    }

    // We use the same fBounds as the whole GrVkOpsRenderPass since we have no way of tracking the
    // bounds in GrOpsTask for parts before and after inline uploads separately.
    fGpu->beginRenderPass(fCurrentRenderPass, &vkClearColor, vkRT, fOrigin, fBounds,
                          SkToBool(fCurrentSecondaryCommandBuffer));
}

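// Inline uploads (transfer commands such as vkCmdCopyBufferToImage) are not allowed inside an
// active render pass, so we end the current pass, perform the upload, and then start an
// additional pass that loads the existing attachment contents.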
void GrVkOpsRenderPass::inlineUpload(GrOpFlushState* state,
                                     GrDeferredTextureUploadFn& upload) {
    if (fCurrentSecondaryCommandBuffer) {
        fCurrentSecondaryCommandBuffer->end(fGpu);
        fGpu->submitSecondaryCommandBuffer(std::move(fCurrentSecondaryCommandBuffer));
    }
    fGpu->endRenderPass(fRenderTarget, fOrigin, fBounds);

    // We pass in true here to signal that after the upload we need to set the upload textures
    // layout back to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL.
    state->doUpload(upload, true);

    this->addAdditionalRenderPass(false);
}

////////////////////////////////////////////////////////////////////////////////

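// Binds whichever of the index, vertex, and instance buffers are present for the next draw on
// the current command buffer.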
void GrVkOpsRenderPass::bindGeometry(const GrGpuBuffer* indexBuffer,
                                     const GrGpuBuffer* vertexBuffer,
                                     const GrGpuBuffer* instanceBuffer) {
    GrVkCommandBuffer* currCmdBuf = this->currentCommandBuffer();
    // There is no need to put any memory barriers to make sure host writes have finished here.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }
    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

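// Finds or creates a GrVkPipelineState compatible with the current render pass, binds its
// pipeline and uniforms, binds textures when they do not vary per mesh, and sets the dynamic
// scissor/viewport/blend-constant state. Returns nullptr if no pipeline could be created.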
GrVkPipelineState* GrVkOpsRenderPass::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    GrVkCommandBuffer* currentCB = this->currentCommandBuffer();
    SkASSERT(fCurrentRenderPass);

    VkRenderPass compatibleRenderPass = fCurrentRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    pipelineState->bindPipeline(fGpu, currentCB);

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin, primProc, pipeline, currentCB);

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies, currentCB);
    }

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, currentCB, fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, currentCB, fRenderTarget, fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, currentCB, fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, currentCB, pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

#ifdef SK_DEBUG
void check_sampled_texture(GrTexture* tex, GrRenderTarget* rt, GrVkGpu* gpu) {
    SkASSERT(!tex->isProtected() || (rt->isProtected() && gpu->protectedContext()));
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(tex);
    SkASSERT(vkTex->currentLayout() == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
}
#endif

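// Issues the draws for a set of GrMeshes. In debug builds we first verify that every sampled
// texture is already in VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL. The pipeline state is rebuilt
// whenever the primitive type changes between meshes, and per-mesh scissor rects and textures
// are rebound before each mesh is sent to the GPU.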
void GrVkOpsRenderPass::onDraw(const GrPrimitiveProcessor& primProc,
                               const GrPipeline& pipeline,
                               const GrPipeline::FixedDynamicState* fixedDynamicState,
                               const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                               const GrMesh meshes[],
                               int meshCount,
                               const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

#ifdef SK_DEBUG
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                check_sampled_texture(texture, fRenderTarget, fGpu);
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            check_sampled_texture(texture, fRenderTarget, fGpu);
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            check_sampled_texture(sampler.peekTexture(), fRenderTarget, fGpu);
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        check_sampled_texture(dstTexture, fRenderTarget, fGpu);
    }
#endif

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, this->currentCommandBuffer(),
                                                     fRenderTarget, fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              this->currentCommandBuffer());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    fCurrentCBIsEmpty = false;
}

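// GrMesh::sendToGpu() (called from onDraw above) calls back into these methods to bind the
// geometry buffers and emit the actual Vulkan draw commands.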
void GrVkOpsRenderPass::sendInstancedMeshToGpu(GrPrimitiveType,
                                               const GrBuffer* vertexBuffer,
                                               int vertexCount,
                                               int baseVertex,
                                               const GrBuffer* instanceBuffer,
                                               int instanceCount,
                                               int baseInstance) {
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
    this->currentCommandBuffer()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkOpsRenderPass::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                      const GrBuffer* indexBuffer,
                                                      int indexCount,
                                                      int baseIndex,
                                                      const GrBuffer* vertexBuffer,
                                                      int baseVertex,
                                                      const GrBuffer* instanceBuffer,
                                                      int instanceCount,
                                                      int baseInstance,
                                                      GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    SkASSERT(!indexBuffer->isCpuBuffer());
    auto gpuIndexBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(gpuIndexBuffer, gpuVertexBuffer, gpuInstanceBuffer);
    this->currentCommandBuffer()->drawIndexed(fGpu, indexCount, instanceCount,
                                              baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

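// Hands the render target to a client SkDrawable's GpuDrawHandler. The drawable records into a
// secondary command buffer, so if we are currently recording directly into the primary command
// buffer we end that pass and start a new one that uses a secondary command buffer. Afterwards
// our cached command buffer state is treated as invalid.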
void GrVkOpsRenderPass::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    if (!fCurrentSecondaryCommandBuffer) {
        fGpu->endRenderPass(fRenderTarget, fOrigin, fBounds);
        this->addAdditionalRenderPass(true);
    }
    SkASSERT(fCurrentSecondaryCommandBuffer);

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = fCurrentSecondaryCommandBuffer->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = fCurrentRenderPass->vkRenderPass();
    SkAssertResult(fCurrentRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif // SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    this->currentCommandBuffer()->invalidateState();
    // Also assume that the drawable produced output.
    fCurrentCBIsEmpty = false;

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));
}