blob: b38ae18b640ef5c8e86d5f54b5567437d5d05ce5 [file] [log] [blame]
egdaniel066df7c2016-06-08 14:02:27 -07001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#include "GrVkGpuCommandBuffer.h"
9
Greg Daniel64cc9aa2018-10-19 13:54:56 -040010#include "GrBackendDrawableInfo.h"
Robert Phillipsbe9aff22019-02-15 11:33:22 -050011#include "GrContextPriv.h"
csmartdalton29df7602016-08-31 11:55:52 -070012#include "GrFixedClip.h"
egdaniel9cb63402016-06-23 08:37:05 -070013#include "GrMesh.h"
Brian Salomon742e31d2016-12-07 17:06:19 -050014#include "GrOpFlushState.h"
egdaniel9cb63402016-06-23 08:37:05 -070015#include "GrPipeline.h"
16#include "GrRenderTargetPriv.h"
egdaniel9cb63402016-06-23 08:37:05 -070017#include "GrTexturePriv.h"
egdaniel066df7c2016-06-08 14:02:27 -070018#include "GrVkCommandBuffer.h"
Ethan Nicholas8e265a72018-12-12 16:22:40 -050019#include "GrVkCommandPool.h"
egdaniel066df7c2016-06-08 14:02:27 -070020#include "GrVkGpu.h"
egdaniel9cb63402016-06-23 08:37:05 -070021#include "GrVkPipeline.h"
egdaniel066df7c2016-06-08 14:02:27 -070022#include "GrVkRenderPass.h"
23#include "GrVkRenderTarget.h"
24#include "GrVkResourceProvider.h"
Greg Daniel64cc9aa2018-10-19 13:54:56 -040025#include "GrVkSemaphore.h"
egdaniel9cb63402016-06-23 08:37:05 -070026#include "GrVkTexture.h"
Greg Daniel64cc9aa2018-10-19 13:54:56 -040027#include "SkDrawable.h"
Greg Daniel36a77ee2016-10-18 10:33:25 -040028#include "SkRect.h"
egdaniel066df7c2016-06-08 14:02:27 -070029
// Records a surface-to-surface copy targeting this texture. The copy is only queued here
// (as a CopyInfo); it is actually executed later, in submit().
void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin,
                                       const SkIRect& srcRect, const SkIPoint& dstPoint) {
    fCopies.emplace_back(src, srcOrigin, srcRect, dstPoint);
}
34
// Intentionally a no-op on the Vulkan backend.
void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
    // NOTE(review): VK_EXT_debug_utils / VK_EXT_debug_marker look like candidates — confirm
    // extension availability before wiring this up.
}
38
39void GrVkGpuTextureCommandBuffer::submit() {
40 for (int i = 0; i < fCopies.count(); ++i) {
41 CopyInfo& copyInfo = fCopies[i];
Chris Dalton298238a2019-02-21 16:28:44 -050042 fGpu->copySurface(fTexture, fOrigin, copyInfo.fSrc.get(), copyInfo.fSrcOrigin,
43 copyInfo.fSrcRect, copyInfo.fDstPoint);
Greg Daniel500d58b2017-08-24 15:59:33 -040044 }
45}
46
47GrVkGpuTextureCommandBuffer::~GrVkGpuTextureCommandBuffer() {}
48
49////////////////////////////////////////////////////////////////////////////////
50
Robert Phillips6b47c7d2017-08-29 07:24:09 -040051void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
egdaniel066df7c2016-06-08 14:02:27 -070052 VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
Robert Phillips95214472017-08-08 18:00:03 -040053 switch (loadOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040054 case GrLoadOp::kLoad:
egdaniel066df7c2016-06-08 14:02:27 -070055 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel066df7c2016-06-08 14:02:27 -070056 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040057 case GrLoadOp::kClear:
egdaniel9cb63402016-06-23 08:37:05 -070058 *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
59 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040060 case GrLoadOp::kDiscard:
egdaniel9cb63402016-06-23 08:37:05 -070061 *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
62 break;
63 default:
64 SK_ABORT("Invalid LoadOp");
egdaniel066df7c2016-06-08 14:02:27 -070065 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel9cb63402016-06-23 08:37:05 -070066 }
67
Robert Phillips95214472017-08-08 18:00:03 -040068 switch (storeOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040069 case GrStoreOp::kStore:
egdaniel066df7c2016-06-08 14:02:27 -070070 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
71 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040072 case GrStoreOp::kDiscard:
egdaniel066df7c2016-06-08 14:02:27 -070073 *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
74 break;
brianosman0bbc3712016-06-14 04:53:09 -070075 default:
egdaniel9cb63402016-06-23 08:37:05 -070076 SK_ABORT("Invalid StoreOp");
brianosman0bbc3712016-06-14 04:53:09 -070077 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
egdaniel066df7c2016-06-08 14:02:27 -070078 }
79}
80
Brian Salomon24d377e2019-04-23 15:24:31 -040081GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu) : fGpu(gpu) {}
Brian Salomonc293a292016-11-30 13:38:32 -050082
Greg Daniel500d58b2017-08-24 15:59:33 -040083void GrVkGpuRTCommandBuffer::init() {
Brian Salomonc293a292016-11-30 13:38:32 -050084 GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
85 GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);
egdaniel9cb63402016-06-23 08:37:05 -070086
Greg Daniel36a77ee2016-10-18 10:33:25 -040087 CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
Brian Salomonc293a292016-11-30 13:38:32 -050088 SkASSERT(fCommandBufferInfos.count() == 1);
Greg Daniel22bc8652017-03-22 15:45:43 -040089 fCurrentCmdInfo = 0;
Greg Daniel36a77ee2016-10-18 10:33:25 -040090
Robert Phillips19e51dc2017-08-09 09:30:51 -040091 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
92 const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
egdaniel066df7c2016-06-08 14:02:27 -070093 if (rpHandle.isValid()) {
Greg Daniel36a77ee2016-10-18 10:33:25 -040094 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
95 vkColorOps,
96 vkStencilOps);
egdaniel066df7c2016-06-08 14:02:27 -070097 } else {
Robert Phillips19e51dc2017-08-09 09:30:51 -040098 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
Greg Daniel36a77ee2016-10-18 10:33:25 -040099 vkColorOps,
100 vkStencilOps);
egdaniel066df7c2016-06-08 14:02:27 -0700101 }
102
Brian Osmancb3d0872018-10-16 15:19:28 -0400103 cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
104 cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
105 cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
106 cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];
egdaniel9cb63402016-06-23 08:37:05 -0700107
Robert Phillips380b90c2017-08-30 07:41:07 -0400108 if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
Greg Daniela41a74a2018-10-09 12:59:23 +0000109 cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
Robert Phillips380b90c2017-08-30 07:41:07 -0400110 } else {
111 cbInfo.fBounds.setEmpty();
112 }
Greg Daniela3c68df2018-03-16 13:46:53 -0400113
114 if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
115 cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
116 } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
117 VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
118 cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
119 } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
120 cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
121 }
Greg Daniel36a77ee2016-10-18 10:33:25 -0400122
Ethan Nicholas8e265a72018-12-12 16:22:40 -0500123 cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
Robert Phillips19e51dc2017-08-09 09:30:51 -0400124 cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
egdaniel066df7c2016-06-08 14:02:27 -0700125}
126
// Set-up path for a render target that wraps a client-supplied secondary command buffer:
// adopt the external render pass and command buffer instead of creating our own, taking a
// ref on each since this object does not own them.
void GrVkGpuRTCommandBuffer::initWrapped() {
    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    SkASSERT(vkRT->wrapsSecondaryCommandBuffer());
    cbInfo.fRenderPass = vkRT->externalRenderPass();
    cbInfo.fRenderPass->ref();

    cbInfo.fBounds.setEmpty();
    cbInfo.fCommandBuffers.push_back(vkRT->getExternalSecondaryCommandBuffer());
    cbInfo.fCommandBuffers[0]->ref();
    // No framebuffer here: we record into the client's already-active render pass.
    cbInfo.currentCmdBuf()->begin(fGpu, nullptr, cbInfo.fRenderPass);
}
Brian Salomonc293a292016-11-30 13:38:32 -0500142
GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    // reset() drops the refs held on render passes and secondary command buffers.
    this->reset();
}
146
Greg Daniel500d58b2017-08-24 15:59:33 -0400147GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }
egdaniel9cb63402016-06-23 08:37:05 -0700148
// Finishes recording on the active secondary command buffer, if there is one
// (fCurrentCmdInfo is -1 when this object is not currently set on a render target).
void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}
154
// Flushes everything recorded so far: for each CommandBufferInfo, first runs its share of
// the deferred pre-command-buffer tasks (inline uploads / copies), then performs the image
// layout transitions the render pass requires, and finally submits the secondary command
// buffers inside their render pass. Order matters throughout — do not reorder.
void GrVkGpuRTCommandBuffer::submit() {
    // Nothing to do if we were never set() (or already reset()).
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    // Draws target the MSAA image when the RT has one, otherwise the RT image itself.
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // fPreCommandBufferTasks stores the pre-work for ALL infos back to back; each info
    // records (fNumPreCmds) how many consecutive entries belong to it.
    auto currPreCmd = fPreCommandBufferTasks.begin();

    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        // Execute this render pass's pre-work before the render pass itself is submitted.
        for (int c = 0; c < cbInfo.fNumPreCmds; ++c, ++currPreCmd) {
            currPreCmd->execute(this);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // We don't want to actually submit the secondary command buffer if it is wrapped.
        if (this->wrapsSecondaryCommandBuffer()) {
            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            // There should have only been one secondary command buffer in the wrapped case so it is
            // safe to just return here.
            SkASSERT(fCommandBufferInfos.count() == 1);
            return;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        // Only submit if the clipped bounds are non-empty.
        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard against write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledTextures.count(); ++j) {
                cbInfo.fSampledTextures[j]->setImageLayout(
                        fGpu, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT,
                        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(cbInfo.fCommandBuffers, cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
    // Every recorded pre-command-buffer task must have been consumed by exactly one info.
    SkASSERT(currPreCmd == fPreCommandBufferTasks.end());
}
254
// Attaches this (recycled) command buffer to a render target. The asserts verify the object
// was reset() since its last use. Wrapped render targets take the initWrapped() path and
// ignore the load/store info; otherwise the Ganesh load/store ops are translated to Vulkan
// ops and init() builds the first render pass.
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->priv().getGpu());
    SkASSERT(!fLastPipelineState);

    this->INHERITED::set(rt, origin);

    if (this->wrapsSecondaryCommandBuffer()) {
        this->initWrapped();
        return;
    }

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}
281
282void GrVkGpuRTCommandBuffer::reset() {
283 for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
284 CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
285 for (int j = 0; j < cbInfo.fCommandBuffers.count(); ++j) {
286 cbInfo.fCommandBuffers[j]->unref(fGpu);
287 }
288 cbInfo.fRenderPass->unref(fGpu);
289 }
290 fCommandBufferInfos.reset();
Brian Salomon24d377e2019-04-23 15:24:31 -0400291 fPreCommandBufferTasks.reset();
Robert Phillips5b5d84c2018-08-09 15:12:18 -0400292
293 fCurrentCmdInfo = -1;
294
295 fLastPipelineState = nullptr;
296 fRenderTarget = nullptr;
297}
298
Greg Daniel070cbaf2019-01-03 17:35:54 -0500299bool GrVkGpuRTCommandBuffer::wrapsSecondaryCommandBuffer() const {
300 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
301 return vkRT->wrapsSecondaryCommandBuffer();
302}
303
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400304////////////////////////////////////////////////////////////////////////////////
305
// Marks the render target contents as discardable. Only acts when nothing has been recorded
// yet in the current render pass — otherwise a don't-care load would drop draws already
// recorded. Implemented by swapping in a compatible render pass whose color and stencil
// attachments load with DONT_CARE.
void GrVkGpuRTCommandBuffer::discard() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (cbInfo.fIsEmpty) {
        // Change the render pass to do a don't-care load for both color & stencil
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        // The replacement must be compatible so anything already keyed to the old pass
        // (e.g. pipelines) remains valid.
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }
}
337
// Intentionally a no-op on the Vulkan backend.
void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}
341
// Clears the stencil clip bit via vkCmdClearAttachments inside the current render pass.
// Writes either the top (clip) stencil bit or zero across the scissored region.
void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        // Set only the most significant (clip) bit.
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        // Vulkan framebuffer space is top-left origin; mirror bottom-left rects vertically.
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}
401
Brian Osman9a9baae2018-11-05 15:06:26 -0500402void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
Robert Phillips19e51dc2017-08-09 09:30:51 -0400403 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
404
Jim Van Verth6a40abc2017-11-02 16:56:09 +0000405 // parent class should never let us get here with no RT
csmartdaltonbf4a8f92016-09-06 10:01:06 -0700406 SkASSERT(!clip.hasWindowRectangles());
egdaniel9cb63402016-06-23 08:37:05 -0700407
Greg Daniel22bc8652017-03-22 15:45:43 -0400408 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Greg Daniel36a77ee2016-10-18 10:33:25 -0400409
Brian Osman9a9baae2018-11-05 15:06:26 -0500410 VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};
egdaniel9cb63402016-06-23 08:37:05 -0700411
Brian Salomond818ebf2018-07-02 14:08:49 +0000412 if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
Robert Phillips74c627f2017-08-09 10:28:00 -0400413 // Change the render pass to do a clear load
egdaniel9cb63402016-06-23 08:37:05 -0700414 GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
415 VK_ATTACHMENT_STORE_OP_STORE);
Robert Phillips74c627f2017-08-09 10:28:00 -0400416 // Preserve the stencil buffer's load & store settings
417 GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);
egdaniel9cb63402016-06-23 08:37:05 -0700418
Greg Daniel36a77ee2016-10-18 10:33:25 -0400419 const GrVkRenderPass* oldRP = cbInfo.fRenderPass;
egdaniel9cb63402016-06-23 08:37:05 -0700420
421 const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
Robert Phillips19e51dc2017-08-09 09:30:51 -0400422 vkRT->compatibleRenderPassHandle();
egdaniel9cb63402016-06-23 08:37:05 -0700423 if (rpHandle.isValid()) {
Greg Daniel36a77ee2016-10-18 10:33:25 -0400424 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
425 vkColorOps,
426 vkStencilOps);
egdaniel9cb63402016-06-23 08:37:05 -0700427 } else {
Robert Phillips19e51dc2017-08-09 09:30:51 -0400428 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
Greg Daniel36a77ee2016-10-18 10:33:25 -0400429 vkColorOps,
430 vkStencilOps);
egdaniel9cb63402016-06-23 08:37:05 -0700431 }
432
Greg Daniel36a77ee2016-10-18 10:33:25 -0400433 SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
egdaniel9cb63402016-06-23 08:37:05 -0700434 oldRP->unref(fGpu);
435
Brian Osman9a9baae2018-11-05 15:06:26 -0500436 cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
Greg Daniela3c68df2018-03-16 13:46:53 -0400437 cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
Greg Daniel36a77ee2016-10-18 10:33:25 -0400438 // Update command buffer bounds
439 cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
egdaniel9cb63402016-06-23 08:37:05 -0700440 return;
441 }
442
443 // We always do a sub rect clear with clearAttachments since we are inside a render pass
444 VkClearRect clearRect;
445 // Flip rect if necessary
csmartdalton29df7602016-08-31 11:55:52 -0700446 SkIRect vkRect;
Brian Salomond818ebf2018-07-02 14:08:49 +0000447 if (!clip.scissorEnabled()) {
Greg Daniela41a74a2018-10-09 12:59:23 +0000448 vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
Robert Phillips4f101a72017-07-28 08:42:04 -0400449 } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
csmartdalton29df7602016-08-31 11:55:52 -0700450 vkRect = clip.scissorRect();
451 } else {
452 const SkIRect& scissor = clip.scissorRect();
Greg Daniel65a09272016-10-12 09:47:22 -0400453 vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
454 scissor.fRight, fRenderTarget->height() - scissor.fTop);
egdaniel9cb63402016-06-23 08:37:05 -0700455 }
456 clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
457 clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
458 clearRect.baseArrayLayer = 0;
459 clearRect.layerCount = 1;
460
461 uint32_t colorIndex;
Greg Daniel36a77ee2016-10-18 10:33:25 -0400462 SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));
egdaniel9cb63402016-06-23 08:37:05 -0700463
464 VkClearAttachment attachment;
465 attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
466 attachment.colorAttachment = colorIndex;
467 attachment.clearValue.color = vkColor;
468
Greg Daniel22bc8652017-03-22 15:45:43 -0400469 cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
Greg Daniel77b53f62016-10-18 11:48:51 -0400470 cbInfo.fIsEmpty = false;
Greg Daniel36a77ee2016-10-18 10:33:25 -0400471
472 // Update command buffer bounds
Brian Salomond818ebf2018-07-02 14:08:49 +0000473 if (!clip.scissorEnabled()) {
Greg Daniel36a77ee2016-10-18 10:33:25 -0400474 cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
475 } else {
476 cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
477 }
egdaniel9cb63402016-06-23 08:37:05 -0700478 return;
479}
480
Greg Daniel500d58b2017-08-24 15:59:33 -0400481////////////////////////////////////////////////////////////////////////////////
482
// Ends the current secondary command buffer and continues recording into a fresh one within
// the SAME render pass (render pass and load/store ops are unchanged).
void GrVkGpuRTCommandBuffer::addAdditionalCommandBuffer() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    cbInfo.currentCmdBuf()->end(fGpu);
    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
491
// Ends the current render pass and starts a brand new CommandBufferInfo whose render pass
// LOADs the previous contents and STOREs its results, so earlier draws are preserved.
// Note: the push_back here may reallocate fCommandBufferInfos — callers must not hold
// references into the array across this call.
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffers.push_back(fGpu->cmdPool()->findOrCreateSecondaryCommandBuffer(fGpu));
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
526
// Queues a texture upload to run before the current render pass's draws. Since an upload
// cannot happen mid-render-pass, if draws are already recorded we split into an additional
// render pass (which loads the prior results); the upload itself is deferred as a
// PreCommandBufferTask executed during submit().
void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }

    // Deferred task: performs the upload on the primary command buffer just before this
    // render pass is submitted.
    class InlineUpload : public PreCommandBufferTask {
    public:
        InlineUpload(GrOpFlushState* state, const GrDeferredTextureUploadFn& upload)
                : fFlushState(state), fUpload(upload) {}
        ~InlineUpload() override = default;

        void execute(GrVkGpuRTCommandBuffer*) override { fFlushState->doUpload(fUpload); }

    private:
        GrOpFlushState* fFlushState;
        GrDeferredTextureUploadFn fUpload;
    };
    fPreCommandBufferTasks.emplace<InlineUpload>(state, upload);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;
}
548
// Records a deferred GPU surface-to-surface copy. The copy cannot run inside a render pass,
// so it is queued as a PreCommandBufferTask; the source surface is held via a read
// GrPendingIOResource until execution. Afterwards the render pass is upgraded to
// load/store so the copied contents are not discarded or cleared away.
void GrVkGpuRTCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    // If work is already recorded, or the pass begins with a clear that would overwrite the
    // copied pixels, split off a new render pass so the copy lands at the right point.
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    // Task that performs the actual copy once execution reaches this point, outside of any
    // active render pass.
    class Copy : public PreCommandBufferTask {
    public:
        Copy(GrSurface* src, GrSurfaceOrigin srcOrigin, const SkIRect& srcRect,
             const SkIPoint& dstPoint, bool shouldDiscardDst)
                : fSrc(src)
                , fSrcOrigin(srcOrigin)
                , fSrcRect(srcRect)
                , fDstPoint(dstPoint)
                , fShouldDiscardDst(shouldDiscardDst) {}
        ~Copy() override = default;

        void execute(GrVkGpuRTCommandBuffer* cb) override {
            cb->fGpu->copySurface(cb->fRenderTarget, cb->fOrigin, fSrc.get(), fSrcOrigin, fSrcRect,
                                  fDstPoint, fShouldDiscardDst);
        }

    private:
        using Src = GrPendingIOResource<GrSurface, kRead_GrIOType>;
        Src fSrc;  // read ref keeps the source surface alive until the task runs
        GrSurfaceOrigin fSrcOrigin;
        SkIRect fSrcRect;
        SkIPoint fDstPoint;
        bool fShouldDiscardDst;
    };

    // If the pass starts with a discard, the destination's prior contents are undefined and
    // the copy may discard them too.
    fPreCommandBufferTasks.emplace<Copy>(
            src, srcOrigin, srcRect, dstPoint,
            LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);
    ++fCommandBufferInfos[fCurrentCmdInfo].fNumPreCmds;

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        // The replacement pass must stay compatible with the one already recorded against.
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    }
}
614
egdaniel9cb63402016-06-23 08:37:05 -0700615////////////////////////////////////////////////////////////////////////////////
616
Brian Salomondbf70722019-02-07 11:31:24 -0500617void GrVkGpuRTCommandBuffer::bindGeometry(const GrGpuBuffer* indexBuffer,
618 const GrGpuBuffer* vertexBuffer,
619 const GrGpuBuffer* instanceBuffer) {
Chris Daltonff926502017-05-03 14:36:54 -0400620 GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
egdaniel9cb63402016-06-23 08:37:05 -0700621 // There is no need to put any memory barriers to make sure host writes have finished here.
622 // When a command buffer is submitted to a queue, there is an implicit memory barrier that
623 // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
624 // an active RenderPass.
egdaniel9cb63402016-06-23 08:37:05 -0700625
Chris Dalton1d616352017-05-31 12:51:23 -0600626 // Here our vertex and instance inputs need to match the same 0-based bindings they were
627 // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
628 uint32_t binding = 0;
629
Brian Salomon802cb312018-06-08 18:05:20 -0400630 if (vertexBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600631 SkASSERT(vertexBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600632 SkASSERT(!vertexBuffer->isMapped());
633
634 currCmdBuf->bindInputBuffer(fGpu, binding++,
635 static_cast<const GrVkVertexBuffer*>(vertexBuffer));
636 }
637
Brian Salomon802cb312018-06-08 18:05:20 -0400638 if (instanceBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600639 SkASSERT(instanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600640 SkASSERT(!instanceBuffer->isMapped());
641
642 currCmdBuf->bindInputBuffer(fGpu, binding++,
643 static_cast<const GrVkVertexBuffer*>(instanceBuffer));
644 }
Chris Daltonff926502017-05-03 14:36:54 -0400645 if (indexBuffer) {
646 SkASSERT(indexBuffer);
647 SkASSERT(!indexBuffer->isMapped());
egdaniel9cb63402016-06-23 08:37:05 -0700648
Chris Daltonff926502017-05-03 14:36:54 -0400649 currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
egdaniel9cb63402016-06-23 08:37:05 -0700650 }
651}
652
// Finds or creates the GrVkPipelineState matching this draw's pipeline/processors/primitive
// type, binds it on the current secondary command buffer, and sets the uniform, texture
// (when not per-mesh), and dynamic scissor/viewport/blend-constant state.
// Returns nullptr if a compatible pipeline state could not be produced.
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    // Primitive-processor textures come from the dynamic-state arrays when supplied per mesh,
    // otherwise from the fixed dynamic state.
    const GrTextureProxy* const* primProcProxies = nullptr;
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        primProcProxies = dynamicStateArrays->fPrimitiveProcessorTextures;
    } else if (fixedDynamicState) {
        primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
    }

    // If the primitive processor samples textures we must have proxies for them, and only then.
    SkASSERT(SkToBool(primProcProxies) == SkToBool(primProc.numTextureSamplers()));

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(fRenderTarget, fOrigin,
                                                                     pipeline,
                                                                     primProc,
                                                                     primProcProxies,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    // Workaround: some devices require a fresh secondary command buffer when the bound
    // pipeline changes (see GrVkCaps::newCBOnPipelineChange()).
    if (!cbInfo.fIsEmpty &&
        fLastPipelineState && fLastPipelineState != pipelineState &&
        fGpu->vkCaps().newCBOnPipelineChange()) {
        this->addAdditionalCommandBuffer();
    }
    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, fRenderTarget, fOrigin,
                                      primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    // Scissor: full-target rect when scissoring is disabled; the fixed rect when there is no
    // per-mesh scissor array (per-mesh scissors are set in onDraw's mesh loop).
    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 fRenderTarget, fOrigin,
                                                 SkIRect::MakeWH(fRenderTarget->width(),
                                                                 fRenderTarget->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                 fOrigin,
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(),
                                               fRenderTarget->config(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}
721
// Records draws for a set of meshes that share one pipeline/primitive-processor setup.
// Every texture the draw samples is first prepared (MSAA resolve if it is also a render
// target, mip regeneration when mip filtering is requested) and retained in
// cbInfo.fSampledTextures for the lifetime of the recorded commands.
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    // Readies one sampled texture: resolve, regenerate mips if dirty, then retain it.
    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT) {
            fGpu->resolveRenderTargetNoFlush(texRT);
        }

        // Check if we need to regenerate any mip maps
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            if (vkTexture->texturePriv().mipMapsAreDirty()) {
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }
        cbInfo.fSampledTextures.push_back(vkTexture);
    };

    // Primitive-processor textures: either one set per mesh (dynamic) or one fixed set.
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    // Fragment-processor textures.
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    // The dst-read texture (if any) must also stay alive until execution.
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledTextures.push_back(sk_ref_sp(static_cast<GrVkTexture*>(dstTexture)));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        // The primitive type feeds pipeline-state creation, so a change between meshes
        // requires re-preparing the draw state.
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        // Per-mesh dynamic state: scissor rect and/or primitive-processor texture bindings.
        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     fOrigin,
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}
819
Brian Salomon802cb312018-06-08 18:05:20 -0400820void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
Greg Daniel500d58b2017-08-24 15:59:33 -0400821 const GrBuffer* vertexBuffer,
822 int vertexCount,
823 int baseVertex,
824 const GrBuffer* instanceBuffer,
825 int instanceCount,
826 int baseInstance) {
Chris Dalton114a3c02017-05-26 15:17:19 -0600827 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500828 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
829 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
830 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
831 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
832 this->bindGeometry(nullptr, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600833 cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600834 fGpu->stats()->incNumDraws();
835}
836
Brian Salomon802cb312018-06-08 18:05:20 -0400837void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
Greg Daniel500d58b2017-08-24 15:59:33 -0400838 const GrBuffer* indexBuffer,
839 int indexCount,
840 int baseIndex,
841 const GrBuffer* vertexBuffer,
842 int baseVertex,
843 const GrBuffer* instanceBuffer,
844 int instanceCount,
Brian Salomon802cb312018-06-08 18:05:20 -0400845 int baseInstance,
846 GrPrimitiveRestart restart) {
847 SkASSERT(restart == GrPrimitiveRestart::kNo);
Chris Dalton114a3c02017-05-26 15:17:19 -0600848 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomondbf70722019-02-07 11:31:24 -0500849 SkASSERT(!vertexBuffer || !vertexBuffer->isCpuBuffer());
850 SkASSERT(!instanceBuffer || !instanceBuffer->isCpuBuffer());
851 SkASSERT(!indexBuffer->isCpuBuffer());
852 auto gpuIndexxBuffer = static_cast<const GrGpuBuffer*>(indexBuffer);
853 auto gpuVertexBuffer = static_cast<const GrGpuBuffer*>(vertexBuffer);
854 auto gpuInstanceBuffer = static_cast<const GrGpuBuffer*>(instanceBuffer);
855 this->bindGeometry(gpuIndexxBuffer, gpuVertexBuffer, gpuInstanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600856 cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
857 baseIndex, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600858 fGpu->stats()->incNumDraws();
859}
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400860
861////////////////////////////////////////////////////////////////////////////////
862
// Hands the current secondary command buffer to an external SkDrawable so it can record
// Vulkan commands directly (used e.g. by the Android framework), then accounts for the
// drawable's output in our cached state and bounds.
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // Render into the MSAA image when present, otherwise the base image.
    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    // Out-param: the drawable may report the region it touched here. A zero extent after the
    // draw means "unknown" and we fall back to the whole target below.
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    // Package up the Vulkan objects the drawable needs to record compatible commands.
    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fColorAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // The framework build additionally exposes the raw VkImage to the drawable.
    vkInfo.fImage = targetImage->image();
#else
    vkInfo.fImage = VK_NULL_HANDLE;
#endif //SK_BUILD_FOR_ANDROID_FRAMEWORK

    GrBackendDrawableInfo info(vkInfo);

    // After we draw into the command buffer via the drawable, cached state we have may be invalid.
    cbInfo.currentCmdBuf()->invalidateState();
    // Also assume that the drawable produced output.
    cbInfo.fIsEmpty = false;

    drawable->draw(info);
    // Hand the drawable's lifetime off to the GPU (presumably kept alive until its recorded
    // commands are submitted — see GrVkGpu::addDrawable).
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}
902