blob: ba72061b3979983a217d067d524f6700956611e8 [file] [log] [blame]
egdaniel066df7c2016-06-08 14:02:27 -07001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
Greg Daniel2d41d0d2019-08-26 11:08:51 -04008#include "src/gpu/vk/GrVkOpsRenderPass.h"
egdaniel066df7c2016-06-08 14:02:27 -07009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "include/core/SkDrawable.h"
11#include "include/core/SkRect.h"
12#include "include/gpu/GrBackendDrawableInfo.h"
13#include "src/gpu/GrContextPriv.h"
14#include "src/gpu/GrFixedClip.h"
15#include "src/gpu/GrMesh.h"
16#include "src/gpu/GrOpFlushState.h"
17#include "src/gpu/GrPipeline.h"
18#include "src/gpu/GrRenderTargetPriv.h"
Chris Dalton6f31cc32019-08-26 20:18:44 +000019#include "src/gpu/GrTexturePriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050020#include "src/gpu/vk/GrVkCommandBuffer.h"
21#include "src/gpu/vk/GrVkCommandPool.h"
22#include "src/gpu/vk/GrVkGpu.h"
23#include "src/gpu/vk/GrVkPipeline.h"
24#include "src/gpu/vk/GrVkRenderPass.h"
25#include "src/gpu/vk/GrVkRenderTarget.h"
26#include "src/gpu/vk/GrVkResourceProvider.h"
27#include "src/gpu/vk/GrVkSemaphore.h"
28#include "src/gpu/vk/GrVkTexture.h"
egdaniel066df7c2016-06-08 14:02:27 -070029
// Out-of-line defaulted special members for the pre-command-buffer task base
// class (tasks queued via fPreCommandBufferTasks and replayed in submit()).
GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;
32
33namespace {
34
35class InlineUpload : public GrVkPrimaryCommandBufferTask {
36public:
37 InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
38 : fFlushState(state), fUpload(upload) {}
Brian Salomon5d8f1cc2019-04-24 09:03:53 -040039
40 void execute(const Args& args) override { fFlushState->doUpload(fUpload); }
41
42private:
43 GrOpFlushState* fFlushState;
44 GrDeferredTextureUploadFn fUpload;
45};
46
Brian Salomon5d8f1cc2019-04-24 09:03:53 -040047} // anonymous namespace
48
49/////////////////////////////////////////////////////////////////////////////
50
Robert Phillips6b47c7d2017-08-29 07:24:09 -040051void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
egdaniel066df7c2016-06-08 14:02:27 -070052 VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
Robert Phillips95214472017-08-08 18:00:03 -040053 switch (loadOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040054 case GrLoadOp::kLoad:
egdaniel066df7c2016-06-08 14:02:27 -070055 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel066df7c2016-06-08 14:02:27 -070056 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040057 case GrLoadOp::kClear:
egdaniel9cb63402016-06-23 08:37:05 -070058 *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
59 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040060 case GrLoadOp::kDiscard:
egdaniel9cb63402016-06-23 08:37:05 -070061 *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
62 break;
63 default:
64 SK_ABORT("Invalid LoadOp");
egdaniel066df7c2016-06-08 14:02:27 -070065 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel9cb63402016-06-23 08:37:05 -070066 }
67
Robert Phillips95214472017-08-08 18:00:03 -040068 switch (storeOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040069 case GrStoreOp::kStore:
egdaniel066df7c2016-06-08 14:02:27 -070070 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
71 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040072 case GrStoreOp::kDiscard:
egdaniel066df7c2016-06-08 14:02:27 -070073 *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
74 break;
brianosman0bbc3712016-06-14 04:53:09 -070075 default:
egdaniel9cb63402016-06-23 08:37:05 -070076 SK_ABORT("Invalid StoreOp");
brianosman0bbc3712016-06-14 04:53:09 -070077 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
egdaniel066df7c2016-06-08 14:02:27 -070078 }
79}
80
Greg Daniel2d41d0d2019-08-26 11:08:51 -040081GrVkOpsRenderPass::GrVkOpsRenderPass(GrVkGpu* gpu) : fGpu(gpu) {}
Brian Salomonc293a292016-11-30 13:38:32 -050082
// Sets up the first CommandBufferInfo for a non-wrapped render target: picks a
// compatible GrVkRenderPass from the resource provider, records the clear
// color/bounds implied by the color load op, and begins a pooled secondary
// command buffer. Assumes set() has already filled in the fVk*Op fields.
void GrVkOpsRenderPass::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);  // init() only ever creates the first pass
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        // Fast path: the RT already caches a handle to its compatible render pass class.
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    // Stash the clear color in Vulkan form; only meaningful when the load op is CLEAR.
    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    // A load-op clear touches the whole target; otherwise bounds start empty
    // and grow as ops are recorded.
    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    // Classify how this pass begins; submit() uses this to decide whether an
    // otherwise-empty command buffer still needs to be submitted.
    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
126
// Variant of init() for render targets that wrap an externally supplied
// secondary command buffer: reuses the RT's external render pass (ref'ed
// rather than looked up) and begins the wrapped secondary command buffer.
void GrVkOpsRenderPass::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();  // reset() unrefs unconditionally, so take a ref here

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffer.reset(
            GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
    // No framebuffer is passed for a wrapped external command buffer.
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}
Brian Salomonc293a292016-11-30 13:38:32 -0500142
// Releases any outstanding command buffers and render-pass refs via reset().
GrVkOpsRenderPass::~GrVkOpsRenderPass() {
    this->reset();
}
146
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400147GrGpu* GrVkOpsRenderPass::gpu() { return fGpu; }
egdaniel9cb63402016-06-23 08:37:05 -0700148
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400149void GrVkOpsRenderPass::end() {
Greg Daniel22bc8652017-03-22 15:45:43 -0400150 if (fCurrentCmdInfo >= 0) {
151 fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
Brian Salomonc293a292016-11-30 13:38:32 -0500152 }
egdaniel066df7c2016-06-08 14:02:27 -0700153}
154
// Replays everything recorded since set() into the GPU's primary command
// buffer: runs the queued pre-command-buffer tasks (e.g. inline uploads),
// performs the required image layout transitions, and submits each
// non-trivial secondary command buffer together with its render pass.
void GrVkOpsRenderPass::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    // Draws resolve into the MSAA image when one exists, otherwise the RT itself.
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // Tasks are stored in one flat list; each CommandBufferInfo records how many
    // of them (fNumPreCmds) belong to it, consumed in order via this cursor.
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        // Run this buffer's pre-tasks (e.g. inline uploads) before its render pass.
        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should have only been one secondary command buffer in the wrapped case so it is
            // safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(SkRect::MakeIWH(fRenderTarget->width(),
                                                     fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard against write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            // Ownership of the secondary command buffer moves to the GPU here.
            fGpu->submitSecondaryCommandBuffer(std::move(cbInfo.fCommandBuffer), cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    // Every queued pre-task must have been attributed to some command buffer.
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}
254
// Configures this ops render pass for a new target. Translates the Ganesh
// color/stencil load-store ops into Vulkan attachment ops and defers to
// init() — or to initWrapped() when the RT wraps an external secondary
// command buffer, in which case colorInfo/stencilInfo are not consulted.
// Must only be called on a freshly constructed or reset() pass.
void GrVkOpsRenderPass::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                            const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
                            const GrOpsRenderPass::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}
285
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400286void GrVkOpsRenderPass::reset() {
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400287 for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
288 CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
Greg Daniel228518f2019-08-07 16:55:17 -0400289 if (cbInfo.fCommandBuffer) {
290 cbInfo.fCommandBuffer.release()->recycle(fGpu);
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400291 }
292 cbInfo.fRenderPass->unref(fGpu);
293 }
294 fCommandBufferInfos.reset();
Brian Salomon24d377e2019-04-23 15:24:31 -0400295 fPreCommandBufferTasks.reset();
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400296
297 fCurrentCmdInfo = -1;
298
299 fLastPipelineState = nullptr;
300 fRenderTarget = nullptr;
Greg Danielb0c7ad12019-06-06 17:23:35 +0000301
302#ifdef SK_DEBUG
303 fIsActive = false;
304#endif
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400305}
306
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400307bool GrVkOpsRenderPass::wrapsSecondaryCommandBuffer() const {
Greg Daniel070cbaf2019-01-03 17:35:54 -0500308 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
309 return vkRT->wrapsSecondaryCommandBuffer();
310}
311
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400312////////////////////////////////////////////////////////////////////////////////
313
// Intentionally a no-op on Vulkan.
void GrVkOpsRenderPass::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
    // NOTE(review): VK_EXT_debug_marker / VK_EXT_debug_utils labels look like
    // candidates — confirm extension availability before wiring them up.
}
317
// Clears the stencil clip bit over the clip's scissor (or the whole target)
// with vkCmdClearAttachments, which is legal inside an active render pass.
// All stencil bits are written, not just the clip bit — see comment below.
void GrVkOpsRenderPass::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        // The clip bit is the top bit of the stencil buffer.
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        // No scissor: clear the whole render target.
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        // Bottom-left origin: flip the scissor rect into Vulkan's top-left space.
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}
377
// Clears the color attachment over the clip's scissor (or the whole target)
// with vkCmdClearAttachments, which is legal inside an active render pass.
void GrVkOpsRenderPass::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    // If we end up in a situation where we are calling clear without a scissor then in general it
    // means we missed an opportunity higher up the stack to set the load op to be a clear. However,
    // there are situations where higher up we couldn't discard the previous ops and set a clear
    // load op (e.g. if we needed to execute a wait op). Thus we also have the empty check here.
    SkASSERT(!cbInfo.fIsEmpty || clip.scissorEnabled());

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        // No scissor: clear the whole render target.
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        // Bottom-left origin: flip the scissor rect into Vulkan's top-left space.
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}
429
Greg Daniel500d58b2017-08-24 15:59:33 -0400430////////////////////////////////////////////////////////////////////////////////
431
// Ends the current secondary command buffer and begins a new one in a fresh
// LOAD/STORE render pass over the same target. Used when work (e.g. an inline
// upload) must execute between already-recorded draws and subsequent ones.
void GrVkOpsRenderPass::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    // Follow-on passes always load the prior contents and store their results.
    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
466
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400467void GrVkOpsRenderPass::inlineUpload(GrOpFlushState* state,
Brian Salomon943ed792017-10-30 09:37:55 -0400468 GrDeferredTextureUploadFn& upload) {
Greg Daniel22bc8652017-03-22 15:45:43 -0400469 if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
470 this->addAdditionalRenderPass();
Greg Daniel77b53f62016-10-18 11:48:51 -0400471 }
Brian Salomon24d377e2019-04-23 15:24:31 -0400472
Brian Salomon24d377e2019-04-23 15:24:31 -0400473 fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
474 ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
Greg Daniel77b53f62016-10-18 11:48:51 -0400475}
476
egdaniel9cb63402016-06-23 08:37:05 -0700477////////////////////////////////////////////////////////////////////////////////
478
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400479void GrVkOpsRenderPass::bindGeometry(const GrGpuBuffer* indexBuffer,
Brian Salomondbf70722019-02-07 11:31:24 -0500480 const GrGpuBuffer* vertexBuffer,
481 const GrGpuBuffer* instanceBuffer) {
Chris Daltonff926502017-05-03 14:36:54 -0400482 GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
egdaniel9cb63402016-06-23 08:37:05 -0700483 // There is no need to put any memory barriers to make sure host writes have finished here.
484 // When a command buffer is submitted to a queue, there is an implicit memory barrier that
485 // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
486 // an active RenderPass.
egdaniel9cb63402016-06-23 08:37:05 -0700487
Chris Dalton1d616352017-05-31 12:51:23 -0600488 // Here our vertex and instance inputs need to match the same 0-based bindings they were
489 // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
490 uint32_t binding = 0;
491
Brian Salomon802cb312018-06-08 18:05:20 -0400492 if (vertexBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600493 SkASSERT(vertexBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600494 SkASSERT(!vertexBuffer->isMapped());
495
496 currCmdBuf->bindInputBuffer(fGpu, binding++,
497 static_cast<const GrVkVertexBuffer*>(vertexBuffer));
498 }
499
Brian Salomon802cb312018-06-08 18:05:20 -0400500 if (instanceBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600501 SkASSERT(instanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600502 SkASSERT(!instanceBuffer->isMapped());
503
504 currCmdBuf->bindInputBuffer(fGpu, binding++,
505 static_cast<const GrVkVertexBuffer*>(instanceBuffer));
506 }
Chris Daltonff926502017-05-03 14:36:54 -0400507 if (indexBuffer) {
508 SkASSERT(indexBuffer);
509 SkASSERT(!indexBuffer->isMapped());
egdaniel9cb63402016-06-23 08:37:05 -0700510
Chris Daltonff926502017-05-03 14:36:54 -0400511 currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
egdaniel9cb63402016-06-23 08:37:05 -0700512 }
513}
514
// Finds or creates a GrVkPipelineState compatible with this pass's render
// pass and binds it, its uniforms, and (when textures don't vary per-mesh)
// its textures on the current secondary command buffer. Also sets the
// dynamic scissor/viewport/blend-constant state. Returns nullptr if a
// pipeline state could not be created.
GrVkPipelineState* GrVkOpsRenderPass::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    // Per-mesh dynamic textures take precedence over the fixed set.
    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    // We must have proxies exactly when the primitive processor samples textures.
    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    if (!pipeline.isScissorEnabled()) {
        // Scissor test disabled: set it to the full render target.
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        // One fixed scissor for all meshes; per-mesh scissors are handled elsewhere.
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}
578
// Records draws for a batch of meshes that share one primitive processor and pipeline.
// Before any draw is recorded, every texture that will be sampled is prepared (MSAA
// resolve + mip regeneration if needed) and ref'd for the lifetime of the command buffer.
// A new pipeline state is created/bound whenever the primitive type changes mid-batch,
// and per-mesh scissor rects / textures are bound when dynamic state arrays supply them.
void GrVkOpsRenderPass::onDraw(const GrPrimitiveProcessor& primProc,
                               const GrPipeline& pipeline,
                               const GrPipeline::FixedDynamicState* fixedDynamicState,
                               const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                               const GrMesh meshes[],
                               int meshCount,
                               const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    // Gets a sampled texture into a drawable state: resolve it if it is also an MSAA
    // render target with pending resolve work, and make sure its mips are clean.
    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT && texRT->needsResolve()) {
            fGpu->resolveRenderTargetNoFlush(texRT);
            // TEMPORARY: MSAA resolve will have dirtied mipmaps. This goes away once we switch
            // to resolving MSAA from the opsTask as well.
            // (1x1 textures are exempt: they have no mip chain to regenerate.)
            if (GrSamplerState::Filter::kMipMap == filter &&
                (vkTexture->width() != 1 || vkTexture->height() != 1)) {
                SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
                SkASSERT(vkTexture->texturePriv().mipMapsAreDirty());
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }

        // Ensure mip maps were all resolved ahead of time by the opsTask.
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            SkASSERT(!vkTexture->texturePriv().mipMapsAreDirty());
        }
    };

    // Primitive-processor textures come either per-mesh (dynamic state arrays, laid out
    // as meshCount * numTextureSamplers consecutive entries) or once for the whole batch
    // (fixed dynamic state).
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
                this->appendSampledTexture(texture);
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
            this->appendSampledTexture(texture);
        }
    }
    // Fragment processors in the pipeline contribute their own sampled textures.
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
            this->appendSampledTexture(sampler.peekTexture());
        }
    }
    // The dst-read texture (for in-shader blending) must also outlive the command buffer.
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        this->appendSampledTexture(dstTexture);
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        // Vulkan pipelines bake in the primitive type, so a type change forces a new
        // (or cached compatible) pipeline state.
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            // Advance into the flat per-mesh texture array for this mesh's samplers.
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        // The mesh calls back into send(Indexed)(Instanced)MeshToGpu to record the draw.
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}
684
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400685void GrVkOpsRenderPass::appendSampledTexture(GrTexture* tex) {
Chris Dalton0d6f7752019-08-19 12:21:27 -0600686 SkASSERT(!tex->isProtected() || (fRenderTarget->isProtected() && fGpu->protectedContext()));
Robert Phillipse1efd382019-08-21 10:07:10 -0400687 GrVkTexture* vkTex = static_cast<GrVkTexture*>(tex);
688
689 fCommandBufferInfos[fCurrentCmdInfo].fSampledTextures.push_back(sk_ref_sp(vkTex));
Chris Dalton0d6f7752019-08-19 12:21:27 -0600690}
691
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400692void GrVkOpsRenderPass::sendInstancedMeshToGpu(GrPrimitiveType,
693 const GrBuffer* vertexBuffer,
694 int vertexCount,
695 int baseVertex,
696 const GrBuffer* instanceBuffer,
697 int instanceCount,
698 int baseInstance) {
Chris Dalton114a3c02017-05-26 15:17:19 -0600699 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500700 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
701 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
702 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
703 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
704 this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600705 cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600706 fGpu->stats()->incNumDraws();
707}
708
Greg Daniel2d41d0d2019-08-26 11:08:51 -0400709void GrVkOpsRenderPass::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
710 const GrBuffer* indexBuffer,
711 int indexCount,
712 int baseIndex,
713 const GrBuffer* vertexBuffer,
714 int baseVertex,
715 const GrBuffer* instanceBuffer,
716 int instanceCount,
717 int baseInstance,
718 GrPrimitiveRestart restart) {
Brian Salomon802cb312018-06-08 18:05:20 -0400719 SkASSERT(restart == GrPrimitiveRestart::kNo);
Chris Dalton114a3c02017-05-26 15:17:19 -0600720 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500721 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
722 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
723 SkASSERT(!indexBuffer->isCpuBuffer());
724 auto gpuIndexxBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
725 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
726 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
727 this->bindGeometry(gpuIndexxBuffer, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600728 cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
729 baseIndex, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600730 fGpu->stats()->incNumDraws();
731}
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400732
733////////////////////////////////////////////////////////////////////////////////
734
// Lets an external SkDrawable record its own Vulkan commands directly into our
// current secondary command buffer, passing it the render-pass/attachment info it
// needs to stay compatible. Afterwards our cached GPU state is invalidated and the
// drawable's reported bounds (or, if unreported, the whole target) are accumulated.
void GrVkOpsRenderPass::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // Rendering lands in the MSAA attachment when one exists, else the target itself.
    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    // The drawable writes the region it touched back through vkInfo.fDrawBounds; a
    // zero extent after draw() means "unknown" and is handled conservatively below.
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // The Android framework is given the actual VkImage it is drawing into.
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif //SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    // The GPU keeps the drawable alive until its recorded commands have executed.
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        // No bounds reported: conservatively dirty the entire render target.
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}
774