/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkGpuCommandBuffer.h"

#include "GrBackendDrawableInfo.h"
#include "GrFixedClip.h"
#include "GrMesh.h"
#include "GrOpFlushState.h"
#include "GrPipeline.h"
#include "GrRenderTargetPriv.h"
#include "GrTexturePriv.h"
#include "GrVkCommandBuffer.h"
#include "GrVkGpu.h"
#include "GrVkPipeline.h"
#include "GrVkRenderPass.h"
#include "GrVkRenderTarget.h"
#include "GrVkResourceProvider.h"
#include "GrVkSemaphore.h"
#include "GrVkTexture.h"
#include "SkDrawable.h"
#include "SkRect.h"

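// GrVkGpuTextureCommandBuffer records no Vulkan commands of its own; it simply queues up
// surface-to-texture copies and replays them against fTexture when submit() is called.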
void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin,
                                       const SkIRect& srcRect, const SkIPoint& dstPoint) {
    fCopies.emplace_back(src, srcOrigin, srcRect, dstPoint);
}

void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

void GrVkGpuTextureCommandBuffer::submit() {
    for (int i = 0; i < fCopies.count(); ++i) {
        CopyInfo& copyInfo = fCopies[i];
        fGpu->copySurface(fTexture, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin, copyInfo.fSrcRect,
                          copyInfo.fDstPoint);
    }
}

GrVkGpuTextureCommandBuffer::~GrVkGpuTextureCommandBuffer() {}

////////////////////////////////////////////////////////////////////////////////

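// Translates Skia's GrLoadOp/GrStoreOp pair into the Vulkan attachment load/store ops used when
// looking up a GrVkRenderPass.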
void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
                           VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
    switch (loadOpIn) {
        case GrLoadOp::kLoad:
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
            break;
        case GrLoadOp::kClear:
            *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            break;
        case GrLoadOp::kDiscard:
            *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid LoadOp");
            *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
    }

    switch (storeOpIn) {
        case GrStoreOp::kStore:
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            break;
        case GrStoreOp::kDiscard:
            *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            break;
        default:
            SK_ABORT("Invalid StoreOp");
            *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
    }
}

GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu)
        : fCurrentCmdInfo(-1)
        , fGpu(gpu)
        , fLastPipelineState(nullptr) {
}

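// init() sets up the first CommandBufferInfo: it picks a render pass compatible with the target's
// load/store ops, records the clear color, seeds the bounds and load/store state, and begins the
// first secondary command buffer.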
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}

GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }

void GrVkGpuRTCommandBuffer::end() {
    if (fCurrentCmdInfo >= 0) {
        fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
    }
}

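// submit() replays each CommandBufferInfo in order: it performs any deferred inline uploads and
// pre-draw copies, transitions the color, stencil, and sampled images into the layouts the render
// pass expects, and then hands the secondary command buffers to the GPU inside that render pass.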
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();

    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        for (int j = 0; j < cbInfo.fPreDrawUploads.count(); ++j) {
            InlineUploadInfo& iuInfo = cbInfo.fPreDrawUploads[j];
            iuInfo.fFlushState->doUpload(iuInfo.fUpload);
        }

        for (int j = 0; j < cbInfo.fPreCopies.count(); ++j) {
            CopyInfo& copyInfo = cbInfo.fPreCopies[j];
            fGpu->copySurface(fRenderTarget, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin,
                              copyInfo.fSrcRect, copyInfo.fDstPoint, copyInfo.fShouldDiscardDst);
        }

        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actual ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also, since the
            // draws will be submitted in different render passes, we need to guard against
            // write-after-write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledImages.count(); ++j) {
                cbInfo.fSampledImages[j]->setImageLayout(fGpu,
                                                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                                         VK_ACCESS_SHADER_READ_BIT,
                                                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                         false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(cbInfo.fCommandBuffers, cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
}

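// set() points this command buffer at a new render target: it translates the color and stencil
// load/store info into Vulkan ops and then calls init() to build the first render pass.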
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->contextPriv().getGpu());
    SkASSERT(!fLastPipelineState);

    this->INHERITED::set(rt, origin);

    GrColorToRGBAFloat(colorInfo.fClearColor, fClearColor);

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}

void GrVkGpuRTCommandBuffer::reset() {
    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
        for (int j = 0; j < cbInfo.fCommandBuffers.count(); ++j) {
            cbInfo.fCommandBuffers[j]->unref(fGpu);
        }
        cbInfo.fRenderPass->unref(fGpu);
    }
    fCommandBufferInfos.reset();

    fCurrentCmdInfo = -1;

    fLastPipelineState = nullptr;
    fRenderTarget = nullptr;
}

////////////////////////////////////////////////////////////////////////////////

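// discard() on an empty command buffer rewrites the current render pass to use don't-care loads so
// the existing attachment contents are never read back, and drops any queued copies whose results
// would be discarded anyway.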
void GrVkGpuRTCommandBuffer::discard() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (cbInfo.fIsEmpty) {
        // Change the render pass to do a don't-care load for both color & stencil
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
        // If we are going to discard the whole render target then the results of any copies we did
        // immediately before to the target won't matter, so just drop them.
        cbInfo.fPreCopies.reset();
    }
}

void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate?
}

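// Stencil clears are recorded with clearAttachments since we are already inside a render pass. The
// stencil attachment is cleared to the requested value over the clip's scissor rect (or the whole
// target when no scissor is set); the contract with callers does not require preserving other
// stencil bits.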
void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}

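// A full-target clear on an otherwise empty command buffer is folded into the render pass itself by
// switching the color load op to VK_ATTACHMENT_LOAD_OP_CLEAR; any other clear is recorded as a
// clearAttachments call scoped to the scissor rect.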
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, GrColor color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor;
    GrColorToRGBAFloat(color, vkColor.float32);

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        GrColorToRGBAFloat(color, cbInfo.fColorClearValue.color.float32);
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // If we are going to clear the whole render target then the results of any copies we did
        // immediately before to the target won't matter, so just drop them.
        cbInfo.fPreCopies.reset();

        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}

////////////////////////////////////////////////////////////////////////////////

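// addAdditionalCommandBuffer() ends the current secondary command buffer and opens a fresh one in
// the same render pass (used, e.g., when vkCaps().newCBOnPipelineChange() requires a new buffer per
// pipeline; see prepareDrawState). addAdditionalRenderPass() goes further and starts a whole new
// render pass that loads and stores the existing attachment contents.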
void GrVkGpuRTCommandBuffer::addAdditionalCommandBuffer() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    cbInfo.currentCmdBuf()->end(fGpu);
    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}

void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
                                          GrDeferredTextureUploadFn& upload) {
    if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
        this->addAdditionalRenderPass();
    }
    fCommandBufferInfos[fCurrentCmdInfo].fPreDrawUploads.emplace_back(state, upload);
}

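// Copies into the render target cannot be recorded inside the render pass, so they are queued on
// the current CommandBufferInfo and executed in submit() before its render pass begins. If draws
// have already been recorded (or the pass starts with a clear), a new render pass is started, and
// the load op is switched to LOAD so the copy results survive into the pass.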
void GrVkGpuRTCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin, const SkIRect& srcRect,
                                  const SkIPoint& dstPoint) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
        this->addAdditionalRenderPass();
    }

    fCommandBufferInfos[fCurrentCmdInfo].fPreCopies.emplace_back(
            src, srcOrigin, srcRect, dstPoint,
            LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);

    if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
        // Change the render pass to do a load and store so we don't lose the results of our copy
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    }
}

////////////////////////////////////////////////////////////////////////////////

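// Binds the buffers for the next draw: vertex and instance buffers go to the same 0-based bindings
// GrVkPipeline assigned them (vertex first, then instance); the index buffer is bound separately.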
void GrVkGpuRTCommandBuffer::bindGeometry(const GrBuffer* indexBuffer,
                                          const GrBuffer* vertexBuffer,
                                          const GrBuffer* instanceBuffer) {
    GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
    // There is no need to put any memory barriers to make sure host writes have finished here.
    // When a command buffer is submitted to a queue, there is an implicit memory barrier that
    // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
    // an active RenderPass.

    // Here our vertex and instance inputs need to match the same 0-based bindings they were
    // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
    uint32_t binding = 0;

    if (vertexBuffer) {
        SkASSERT(vertexBuffer);
        SkASSERT(!vertexBuffer->isCPUBacked());
        SkASSERT(!vertexBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(vertexBuffer));
    }

    if (instanceBuffer) {
        SkASSERT(instanceBuffer);
        SkASSERT(!instanceBuffer->isCPUBacked());
        SkASSERT(!instanceBuffer->isMapped());

        currCmdBuf->bindInputBuffer(fGpu, binding++,
                                    static_cast<const GrVkVertexBuffer*>(instanceBuffer));
    }

    if (indexBuffer) {
        SkASSERT(indexBuffer);
        SkASSERT(!indexBuffer->isMapped());
        SkASSERT(!indexBuffer->isCPUBacked());

        currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
    }
}

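// prepareDrawState() looks up (or creates) a GrVkPipelineState compatible with the current render
// pass, switches to a new secondary command buffer when the pipeline changes and the caps require
// it (newCBOnPipelineChange), binds the pipeline, uniforms, and (when not per-mesh) the primitive
// processor textures, and then sets the dynamic scissor, viewport, and blend-constant state.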
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(pipeline,
                                                                     primProc,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    if (!cbInfo.fIsEmpty &&
        fLastPipelineState && fLastPipelineState != pipelineState &&
        fGpu->vkCaps().newCBOnPipelineChange()) {
        this->addAdditionalCommandBuffer();
    }
    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        const GrTextureProxy* const* primProcProxies = nullptr;
        if (fixedDynamicState) {
            primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
        }
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    GrRenderTarget* rt = pipeline.renderTarget();

    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 rt, pipeline.proxy()->origin(),
                                                 SkIRect::MakeWH(rt->width(), rt->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), rt,
                                                 pipeline.proxy()->origin(),
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), rt);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(), rt->config(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}

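// onDraw() first makes sure every texture the draw samples is resolved and has valid mip levels,
// collecting them in fSampledImages so submit() can transition their layouts. It then builds a
// pipeline state per primitive type and records one draw per GrMesh, rebinding the scissor and
// textures for meshes that carry per-mesh dynamic state.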
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    SkASSERT(pipeline.renderTarget() == fRenderTarget);

    if (!meshCount) {
        return;
    }

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT) {
            fGpu->onResolveRenderTarget(texRT);
        }

        // Check if we need to regenerate any mip maps
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            if (vkTexture->texturePriv().mipMapsAreDirty()) {
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }
        cbInfo.fSampledImages.push_back(vkTexture);
    };

    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledImages.push_back(static_cast<GrVkTexture*>(dstTexture));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     pipeline.proxy()->origin(),
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}

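// These two callbacks are invoked via mesh.sendToGpu(this) in onDraw(); they bind the mesh's
// buffers and issue the (instanced or indexed-instanced) draw on the current secondary command
// buffer.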
void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
                                                    const GrBuffer* vertexBuffer,
                                                    int vertexCount,
                                                    int baseVertex,
                                                    const GrBuffer* instanceBuffer,
                                                    int instanceCount,
                                                    int baseInstance) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    this->bindGeometry(nullptr, vertexBuffer, instanceBuffer);
    cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
                                                           const GrBuffer* indexBuffer,
                                                           int indexCount,
                                                           int baseIndex,
                                                           const GrBuffer* vertexBuffer,
                                                           int baseVertex,
                                                           const GrBuffer* instanceBuffer,
                                                           int instanceCount,
                                                           int baseInstance,
                                                           GrPrimitiveRestart restart) {
    SkASSERT(restart == GrPrimitiveRestart::kNo);
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    this->bindGeometry(indexBuffer, vertexBuffer, instanceBuffer);
    cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
                                        baseIndex, baseVertex, baseInstance);
    fGpu->stats()->incNumDraws();
}

////////////////////////////////////////////////////////////////////////////////

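// executeDrawable() lets an external SkDrawable record Vulkan commands directly: it packages the
// current secondary command buffer, compatible render pass, attachment index, and image format into
// a GrBackendDrawableInfo, invokes the drawable, and then grows the command buffer bounds by
// whatever draw bounds the drawable reported (or the whole target if it reported none).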
void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
    GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);

    GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    VkRect2D bounds;
    bounds.offset = { 0, 0 };
    bounds.extent = { 0, 0 };

    GrVkDrawableInfo vkInfo;
    vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
    vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fImageAttachmentIndex));
    vkInfo.fFormat = targetImage->imageFormat();
    vkInfo.fDrawBounds = &bounds;

    GrBackendDrawableInfo info(vkInfo);

    drawable->draw(info);
    fGpu->addDrawable(std::move(drawable));

    if (bounds.extent.width == 0 || bounds.extent.height == 0) {
        cbInfo.fBounds.join(target->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
                                             bounds.extent.width, bounds.extent.height));
    }
}