/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkGpuCommandBuffer.h"

#include "include/core/SkDrawable.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrBackendDrawableInfo.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrFixedClip.h"
#include "src/gpu/GrMesh.h"
#include "src/gpu/GrOpFlushState.h"
#include "src/gpu/GrPipeline.h"
#include "src/gpu/GrRenderTargetPriv.h"
#include "src/gpu/GrTexturePriv.h"
#include "src/gpu/vk/GrVkCommandBuffer.h"
#include "src/gpu/vk/GrVkCommandPool.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkPipeline.h"
#include "src/gpu/vk/GrVkRenderPass.h"
#include "src/gpu/vk/GrVkRenderTarget.h"
#include "src/gpu/vk/GrVkResourceProvider.h"
#include "src/gpu/vk/GrVkSemaphore.h"
#include "src/gpu/vk/GrVkTexture.h"

GrVkPrimaryCommandBufferTask::~GrVkPrimaryCommandBufferTask() = default;
GrVkPrimaryCommandBufferTask::GrVkPrimaryCommandBufferTask() = default;

namespace {

class InlineUpload : public GrVkPrimaryCommandBufferTask {
public:
    InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
            : fFlushState(state), fUpload(upload) {}

    void execute(const Args& args) override { fFlushState->doUpload(fUpload); }

private:
    GrOpFlushState* fFlushState;
    GrDeferredTextureUploadFn fUpload;
};

}  // anonymous namespace

/////////////////////////////////////////////////////////////////////////////

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuTextureCommandBuffer::submit() {
    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fTexture};
    for (auto& task : fTasks) {
        task.execute(taskArgs);
    }
}

////////////////////////////////////////////////////////////////////////////////

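// Translates the requested GrLoadOp/GrStoreOp pair into the corresponding Vulkan attachment
// load/store ops used when selecting a compatible render pass.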
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu) : fGpu(gpu) {}

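// Sets up the first CommandBufferInfo for this render target: finds a render pass compatible
// with the requested load/store ops, records the clear color and bounds, and begins a secondary
// command buffer obtained from the GrVkGpu's command pool.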
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffer.reset(
            GrVkSecondaryCommandBuffer::Create(vkRT->getExternalSecondaryCommandBuffer()));
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

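// Replays everything recorded for this render target: executes the queued primary-command-buffer
// tasks (e.g. inline uploads), performs the required image layout transitions for the color,
// stencil, and sampled images, and submits each non-empty secondary command buffer inside its
// render pass.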
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    auto currPreCmd = fPreCommandBufferTasks.begin();

    GrVkPrimaryCommandBufferTask::Args taskArgs{fGpu, fRenderTarget};
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(taskArgs);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard
        // will get reordered with the rest of the draw commands and we can remove this check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear, so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should have only been one secondary command buffer in the wrapped case, so it
            // is safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a
        // discard call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard against write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout.
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(std::move(cbInfo.fCommandBuffer), cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}

void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

#ifdef SK_DEBUG
    fIsActive = true;
#endif

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        if (cbInfo.fCommandBuffer) {
            cbInfo.fCommandBuffer.release()->recycle(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();
    fPreCommandBufferTasks.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;

#ifdef SK_DEBUG
    fIsActive = false;
#endif
}

bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // This should only be called internally when we know we have a stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // The parent class should never let us get here with no render target.
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

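// Ends the current secondary command buffer and opens a new one in a render pass whose ops load
// and store the attachments, so that work scheduled between the two passes (such as an inline
// upload) can execute without losing the existing contents.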
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffer = fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu);
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }

    fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}

////////////////////////////////////////////////////////////////////////////////

void GrVkGpuRTCommandBuffer::bindGeometry(const GrGpuBuffer* indexBuffer,
                                          const GrGpuBuffer* vertexBuffer,
                                          const GrGpuBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to put any memory barriers to make sure host writes have finished here.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }
    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

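// Finds or creates a pipeline state compatible with the current render pass, binds it along with
// its uniforms (and its textures, unless they change per mesh), and sets the dynamic viewport,
// scissor, and blend-constant state on the current secondary command buffer.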
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               pipeline.outputSwizzle(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target.
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT && texRT->needsResolve()) {
            fGpu->resolveRenderTargetNoFlush(texRT);
            // TEMPORARY: MSAA resolve will have dirtied mipmaps. This goes away once we switch
            // to resolving MSAA from the opsTask as well.
            if (GrSamplerState::Filter::kMipMap == filter &&
                (vkTexture->width() != 1 || vkTexture->height() != 1)) {
                SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
                SkASSERT(vkTexture->texturePriv().mipMapsAreDirty());
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }

        // Ensure mip maps were all resolved ahead of time by the opsTask.
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            SkASSERT(!vkTexture->texturePriv().mipMapsAreDirty());
        }
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
                this->appendSampledTexture(texture);
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
            this->appendSampledTexture(texture);
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
            this->appendSampledTexture(sampler.peekTexture());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        this->appendSampledTexture(dstTexture);
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                               dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

void GrVkGpuRTCommandBuffer::appendSampledTexture(GrTexture* tex) {
    SkASSERT(!tex->isProtected() || (fRenderTarget->isProtected() && fGpu->protectedContext()));
    GrVkTexture* vkTex = static_cast<GrVkTexture*>(tex);

    fCommandBufferInfos[fCurrentCmdInfo].fSampledTextures.push_back(sk_ref_sp(vkTex));
}

void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
    SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
    SkASSERT(!indexBuffer->isCpuBuffer());
    auto gpuIndexBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
    auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
    auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
    this->bindGeometry(gpuIndexBuffer, gpuVertexBuffer, gpuInstanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

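// Lets a client-supplied SkDrawable record Vulkan commands directly into the current secondary
// command buffer via GrVkDrawableInfo. Any command buffer state we have cached may be changed by
// the drawable, so it is invalidated here.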
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif // SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}