/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkGpuCommandBuffer.h"

#include "GrBackendDrawableInfo.h"
#include "GrFixedClip.h"
#include "GrMesh.h"
#include "GrOpFlushState.h"
#include "GrPipeline.h"
#include "GrRenderTargetPriv.h"
#include "GrTexturePriv.h"
#include "GrVkCommandBuffer.h"
#include "GrVkCommandPool.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderPass.h"
#include "GrVkRenderTarget.h"
#include "GrVkResourceProvider.h"
#include "GrVkSemaphore.h"
#include "GrVkTexture.h"
#include "SkDrawable.h"
#include "SkRect.h"

void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin,
                                       const SkIRect& srcRect, const SkIPoint& dstPoint) {
    fCopies.emplace_back(src, srcOrigin, srcRect, dstPoint);
}

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuTextureCommandBuffer::submit() {
    for (int i = 0; i < fCopies.count(); ++i) {
        CopyInfo& copyInfo = fCopies[i];
        fGpu->copySurface(fTexture, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin, copyInfo.fSrcRect,
                          copyInfo.fDstPoint);
    }
}

GrVkGpuTextureCommandBuffer::~GrVkGpuTextureCommandBuffer() {}

////////////////////////////////////////////////////////////////////////////////

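// Maps the backend-agnostic GrLoadOp/GrStoreOp pair onto the VkAttachmentLoadOp/VkAttachmentStoreOp
// values used when selecting a render pass.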
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu)
        : fCurrentCmdInfo(-1)
        , fGpu(gpu)
        , fLastPipelineState(nullptr) {
}

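// Sets up the first CommandBufferInfo for a render target we render to directly: finds a render
// pass compatible with the requested load/store ops, records the clear color, and begins the first
// secondary command buffer.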
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

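// Sets up the single CommandBufferInfo for a render target that wraps an externally provided
// secondary command buffer; the external render pass and command buffer are reused rather than
// created here.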
void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffers.push_back(vkRT->getExternalSecondaryCommandBuffer());
    cbInfo.fCommandBuffers[0]->ref();
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

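// Performs any deferred uploads and copies, transitions the color, stencil, and sampled images to
// the layouts the render passes expect, and then submits each recorded secondary command buffer.
// Wrapped command buffers are only prepared here, not submitted.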
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();

    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int j = 0; j < cbInfo.fPreDrawUploads.count(); ++j) {
            InlineUploadInfo& iuInfo = cbInfo.fPreDrawUploads[j];
            iuInfo.fFlushState->doUpload(iuInfo.fUpload);
        }

        for (int j = 0; j < cbInfo.fPreCopies.count(); ++j) {
            CopyInfo& copyInfo = cbInfo.fPreCopies[j];
            fGpu->copySurface(fRenderTarget, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin,
                              copyInfo.fSrcRect, copyInfo.fDstPoint, copyInfo.fShouldDiscardDst);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer, and we are not
            // using the render pass to do a clear, so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledImages.count(); ++j) {
                cbInfo.fSampledImages[j]->setImageLayout(fGpu,
                                                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                                         VK_ACCESS_SHADER_READ_BIT,
                                                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                         false);
            }

            // There should have only been one secondary command buffer in the wrapped case so it
            // is safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure that if we only have a discard load, we execute the discard on the whole
        // image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a
        // discard call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done, since the pre-work may change the layouts. Also, since the draws
            // will be submitted in different render passes, we need to guard against
            // write-after-write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledImages.count(); ++j) {
                cbInfo.fSampledImages[j]->setImageLayout(fGpu,
                                                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                                         VK_ACCESS_SHADER_READ_BIT,
                                                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                         false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(cbInfo.fCommandBuffers, cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
}

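// Binds this command buffer to a render target and origin, converts the requested load/store ops
// to their Vulkan equivalents, and then takes either the wrapped or the normal init path.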
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->contextPriv().getGpu());
    SkASSERT(!fLastPipelineState);

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        for (int j = 0; j < cbInfo.fCommandBuffers.count(); ++j) {
            cbInfo.fCommandBuffers[j]->unref(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;
}

bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    return vkRT->wrapsSecondaryCommandBuffer();
}

////////////////////////////////////////////////////////////////////////////////

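// If nothing has been recorded yet, swaps the current render pass for one that uses don't-care
// loads so the existing attachment contents are discarded rather than loaded.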
void GrVkGpuRTCommandBuffer::discard() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (cbInfo.fIsEmpty) {
        // Change the render pass to do a don't-care load for both color & stencil
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
        // If we are going to discard the whole render target then the results of any copies we
        // made to the target immediately beforehand won't matter, so just drop them.
        cbInfo.fPreCopies.reset();
    }
}

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // This should only be called internally when we know we have a stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

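// Clears the color attachment. When nothing has been recorded and the clear is not scissored, the
// render pass itself is switched to a clear load; otherwise a clearAttachments call is recorded
// into the current secondary command buffer.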
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // If we are going to clear the whole render target then the results of any copies we
        // made to the target immediately beforehand won't matter, so just drop them.
        cbInfo.fPreCopies.reset();

        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

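// Ends the current secondary command buffer and begins a fresh one that continues the same render
// pass.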
void GrVkGpuRTCommandBuffer::addAdditionalCommandBuffer() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    cbInfo.currentCmdBuf()->end(fGpu);
    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

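// Closes out the current CommandBufferInfo and starts a new one whose render pass loads and stores
// both color and stencil, so results recorded so far are preserved across the break.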
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }
    fCommandBufferInfos[fCurrentCmdInfo].fPreDrawUploads.emplace_back(state, upload);
}

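// Records a copy into the render target that will be executed just before this command buffer is
// submitted, switching to a load-and-store render pass when needed so the copied data is not lost.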
void GrVkGpuRTCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    fCommandBufferInfos[fCurrentCmdInfo].fPreCopies.emplace_back(
            src, srcOrigin, srcRect, dstPoint,
            LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    }
}

////////////////////////////////////////////////////////////////////////////////

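// Binds the index, vertex, and instance buffers (any of which may be null) for the upcoming draw.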
void GrVkGpuRTCommandBuffer::bindGeometry(const GrBuffer* indexBuffer,
                                          const GrBuffer* vertexBuffer,
                                          const GrBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to insert any memory barriers here to make sure host writes have finished.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isCPUBacked());
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isCPUBacked());
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }
    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());
        SkASSERT(!indexBuffer->isCPUBacked());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

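// Finds or creates a pipeline state compatible with the current render pass, binds it together
// with its uniforms and (when not per-mesh) its textures, and sets the dynamic scissor, viewport,
// and blend-constant state.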
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    if (!cbInfo.fIsEmpty &&
        fLastPipelineState && fLastPipelineState != pipelineState &&
        fGpu->vkCaps().newCBOnPipelineChange()) {
        this->addAdditionalCommandBuffer();
    }
    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    GrRenderTarget* rt = pipeline.renderTarget();

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 rt, pipeline.proxy()->origin(),
                                                 SkIRect::MakeWH(rt->width(), rt->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), rt,
                                                 pipeline.proxy()->origin(),
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), rt);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(), rt->config(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

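// Prepares every texture that will be sampled (resolving and regenerating mip maps as needed),
// sets up the pipeline state, and records each mesh, rebuilding the pipeline state whenever the
// primitive type changes and rebinding scissor or textures when they vary per mesh.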
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    SkASSERT(pipeline.renderTarget() == fRenderTarget);

    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT) {
            fGpu->resolveRenderTargetNoFlush(texRT);
        }

        // Check if we need to regenerate any mip maps
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            if (vkTexture->texturePriv().mipMapsAreDirty()) {
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }
        cbInfo.fSampledImages.push_back(vkTexture);
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledImages.push_back(static_cast<GrVkTexture*>(dstTexture));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     pipeline.proxy()->origin(),
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    this->bindGeometry(nullptr, vertexBuffer, instanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    this->bindGeometry(indexBuffer, vertexBuffer, instanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

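// Hands the current secondary command buffer and render pass info to an external SkDrawable so it
// can record its own Vulkan commands; our cached command buffer state is invalidated since the
// drawable records directly into the buffer.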
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, any cached state we have may be
    // invalid.
    cbInfo.currentCmdBuf()->invalidateState();

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}