blob: 9ffd8f80b45fc5735fc6b0f451aa36c7b2675728 [file] [log] [blame]
egdaniel066df7c2016-06-08 14:02:27 -07001/*
2* Copyright 2016 Google Inc.
3*
4* Use of this source code is governed by a BSD-style license that can be
5* found in the LICENSE file.
6*/
7
8#include "GrVkGpuCommandBuffer.h"
9
Greg Daniel64cc9aa2018-10-19 13:54:56 -040010#include "GrBackendDrawableInfo.h"
csmartdalton29df7602016-08-31 11:55:52 -070011#include "GrFixedClip.h"
egdaniel9cb63402016-06-23 08:37:05 -070012#include "GrMesh.h"
Brian Salomon742e31d2016-12-07 17:06:19 -050013#include "GrOpFlushState.h"
egdaniel9cb63402016-06-23 08:37:05 -070014#include "GrPipeline.h"
15#include "GrRenderTargetPriv.h"
egdaniel9cb63402016-06-23 08:37:05 -070016#include "GrTexturePriv.h"
egdaniel066df7c2016-06-08 14:02:27 -070017#include "GrVkCommandBuffer.h"
18#include "GrVkGpu.h"
egdaniel9cb63402016-06-23 08:37:05 -070019#include "GrVkPipeline.h"
egdaniel066df7c2016-06-08 14:02:27 -070020#include "GrVkRenderPass.h"
21#include "GrVkRenderTarget.h"
22#include "GrVkResourceProvider.h"
Greg Daniel64cc9aa2018-10-19 13:54:56 -040023#include "GrVkSemaphore.h"
egdaniel9cb63402016-06-23 08:37:05 -070024#include "GrVkTexture.h"
Greg Daniel64cc9aa2018-10-19 13:54:56 -040025#include "SkDrawable.h"
Greg Daniel36a77ee2016-10-18 10:33:25 -040026#include "SkRect.h"
egdaniel066df7c2016-06-08 14:02:27 -070027
Robert Phillipsb0e93a22017-08-29 08:26:54 -040028void GrVkGpuTextureCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin,
29 const SkIRect& srcRect, const SkIPoint& dstPoint) {
30 fCopies.emplace_back(src, srcOrigin, srcRect, dstPoint);
Greg Daniel500d58b2017-08-24 15:59:33 -040031}
32
// Intentionally a no-op: event markers are not emitted on the Vulkan backend.
void GrVkGpuTextureCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate? (e.g. VK_EXT_debug_utils labels)
}
36
37void GrVkGpuTextureCommandBuffer::submit() {
38 for (int i = 0; i < fCopies.count(); ++i) {
39 CopyInfo& copyInfo = fCopies[i];
Robert Phillipsb0e93a22017-08-29 08:26:54 -040040 fGpu->copySurface(fTexture, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin, copyInfo.fSrcRect,
41 copyInfo.fDstPoint);
Greg Daniel500d58b2017-08-24 15:59:33 -040042 }
43}
44
45GrVkGpuTextureCommandBuffer::~GrVkGpuTextureCommandBuffer() {}
46
47////////////////////////////////////////////////////////////////////////////////
48
Robert Phillips6b47c7d2017-08-29 07:24:09 -040049void get_vk_load_store_ops(GrLoadOp loadOpIn, GrStoreOp storeOpIn,
egdaniel066df7c2016-06-08 14:02:27 -070050 VkAttachmentLoadOp* loadOp, VkAttachmentStoreOp* storeOp) {
Robert Phillips95214472017-08-08 18:00:03 -040051 switch (loadOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040052 case GrLoadOp::kLoad:
egdaniel066df7c2016-06-08 14:02:27 -070053 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel066df7c2016-06-08 14:02:27 -070054 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040055 case GrLoadOp::kClear:
egdaniel9cb63402016-06-23 08:37:05 -070056 *loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
57 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040058 case GrLoadOp::kDiscard:
egdaniel9cb63402016-06-23 08:37:05 -070059 *loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
60 break;
61 default:
62 SK_ABORT("Invalid LoadOp");
egdaniel066df7c2016-06-08 14:02:27 -070063 *loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
egdaniel9cb63402016-06-23 08:37:05 -070064 }
65
Robert Phillips95214472017-08-08 18:00:03 -040066 switch (storeOpIn) {
Robert Phillips6b47c7d2017-08-29 07:24:09 -040067 case GrStoreOp::kStore:
egdaniel066df7c2016-06-08 14:02:27 -070068 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
69 break;
Robert Phillips6b47c7d2017-08-29 07:24:09 -040070 case GrStoreOp::kDiscard:
egdaniel066df7c2016-06-08 14:02:27 -070071 *storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
72 break;
brianosman0bbc3712016-06-14 04:53:09 -070073 default:
egdaniel9cb63402016-06-23 08:37:05 -070074 SK_ABORT("Invalid StoreOp");
brianosman0bbc3712016-06-14 04:53:09 -070075 *storeOp = VK_ATTACHMENT_STORE_OP_STORE;
egdaniel066df7c2016-06-08 14:02:27 -070076 }
77}
78
// Constructs an unbound command buffer; set() must be called before use.
// fCurrentCmdInfo == -1 marks "no active CommandBufferInfo".
GrVkGpuRTCommandBuffer::GrVkGpuRTCommandBuffer(GrVkGpu* gpu)
    : fCurrentCmdInfo(-1)
    , fGpu(gpu)
    , fLastPipelineState(nullptr) {
}
84
// Creates the first CommandBufferInfo for the render target set via set():
// finds/creates a compatible render pass with the requested load/store ops,
// records the clear color, seeds the bounds/load-store state, and begins the
// first secondary command buffer.
void GrVkGpuRTCommandBuffer::init() {
    GrVkRenderPass::LoadStoreOps vkColorOps(fVkColorLoadOp, fVkColorStoreOp);
    GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    SkASSERT(fCommandBufferInfos.count() == 1);
    fCurrentCmdInfo = 0;

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    // Prefer the cached compatible-render-pass handle when the target has one.
    const GrVkResourceProvider::CompatibleRPHandle& rpHandle = vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }

    cbInfo.fColorClearValue.color.float32[0] = fClearColor[0];
    cbInfo.fColorClearValue.color.float32[1] = fClearColor[1];
    cbInfo.fColorClearValue.color.float32[2] = fClearColor[2];
    cbInfo.fColorClearValue.color.float32[3] = fClearColor[3];

    // A clear-on-load touches the whole target, so the bounds start full;
    // otherwise they grow as draws/clears are recorded.
    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
    } else {
        cbInfo.fBounds.setEmpty();
    }

    if (VK_ATTACHMENT_LOAD_OP_CLEAR == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
    } else if (VK_ATTACHMENT_LOAD_OP_LOAD == fVkColorLoadOp &&
               VK_ATTACHMENT_STORE_OP_STORE == fVkColorStoreOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
    } else if (VK_ATTACHMENT_LOAD_OP_DONT_CARE == fVkColorLoadOp) {
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
    }

    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
128
Brian Salomonc293a292016-11-30 13:38:32 -0500129
// reset() unrefs all render passes and secondary command buffers we hold.
GrVkGpuRTCommandBuffer::~GrVkGpuRTCommandBuffer() {
    this->reset();
}
133
// Accessor required by the GrGpuRTCommandBuffer interface.
GrGpu* GrVkGpuRTCommandBuffer::gpu() { return fGpu; }
egdaniel9cb63402016-06-23 08:37:05 -0700135
Greg Daniel500d58b2017-08-24 15:59:33 -0400136void GrVkGpuRTCommandBuffer::end() {
Greg Daniel22bc8652017-03-22 15:45:43 -0400137 if (fCurrentCmdInfo >= 0) {
138 fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);
Brian Salomonc293a292016-11-30 13:38:32 -0500139 }
egdaniel066df7c2016-06-08 14:02:27 -0700140}
141
// Replays all recorded work: for each CommandBufferInfo, performs its deferred
// uploads and copies, transitions the attachment/sampled-image layouts, and
// submits the secondary command buffers inside their render pass.
void GrVkGpuRTCommandBuffer::submit() {
    if (!fRenderTarget) {
        return;
    }

    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
    // Render into the MSAA image when present, otherwise the target itself.
    GrVkImage* targetImage = vkRT->msaaImage() ? vkRT->msaaImage() : vkRT;
    GrStencilAttachment* stencil = fRenderTarget->renderTargetPriv().getStencilAttachment();

    for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
        CommandBufferInfo& cbInfo = fCommandBufferInfos[i];

        // Perform uploads that must land before this render pass executes.
        for (int j = 0; j < cbInfo.fPreDrawUploads.count(); ++j) {
            InlineUploadInfo& iuInfo = cbInfo.fPreDrawUploads[j];
            iuInfo.fFlushState->doUpload(iuInfo.fUpload);
        }

        // Perform copies that must land before this render pass executes.
        for (int j = 0; j < cbInfo.fPreCopies.count(); ++j) {
            CopyInfo& copyInfo = cbInfo.fPreCopies[j];
            fGpu->copySurface(fRenderTarget, fOrigin, copyInfo.fSrc, copyInfo.fSrcOrigin,
                              copyInfo.fSrcRect, copyInfo.fDstPoint, copyInfo.fShouldDiscardDst);
        }


        // TODO: Many things create a scratch texture which adds the discard immediately, but then
        // don't draw to it right away. This causes the discard to be ignored and we get yelled at
        // for loading uninitialized data. However, once MDB lands with reordering, the discard will
        // get reordered with the rest of the draw commands and we can remove the discard check.
        if (cbInfo.fIsEmpty &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithClear &&
            cbInfo.fLoadStoreState != LoadStoreState::kStartsWithDiscard) {
            // We have submitted no actual draw commands to the command buffer and we are not using
            // the render pass to do a clear so there is no need to submit anything.
            continue;
        }

        // Make sure if we only have a discard load that we execute the discard on the whole image.
        // TODO: Once we improve our tracking of discards so that we never end up flushing a discard
        // call with no actually ops, remove this.
        if (cbInfo.fIsEmpty && cbInfo.fLoadStoreState == LoadStoreState::kStartsWithDiscard) {
            cbInfo.fBounds = SkRect::MakeWH(vkRT->width(), vkRT->height());
        }

        // Skip submission entirely when the recorded bounds fall outside the target.
        if (cbInfo.fBounds.intersect(0, 0,
                                     SkIntToScalar(fRenderTarget->width()),
                                     SkIntToScalar(fRenderTarget->height()))) {
            // Make sure we do the following layout changes after all copies, uploads, or any other
            // pre-work is done since we may change the layouts in the pre-work. Also since the
            // draws will be submitted in different render passes, we need to guard againts write
            // and write issues.

            // Change layout of our render target so it can be used as the color attachment.
            // TODO: If we know that we will never be blending or loading the attachment we could
            // drop the VK_ACCESS_COLOR_ATTACHMENT_READ_BIT.
            targetImage->setImageLayout(fGpu,
                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        false);

            // If we are using a stencil attachment we also need to update its layout
            if (stencil) {
                GrVkStencilAttachment* vkStencil = (GrVkStencilAttachment*)stencil;
                // We need the write and read access bits since we may load and store the stencil.
                // The initial load happens in the VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT so we
                // wait there.
                vkStencil->setImageLayout(fGpu,
                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
                                          VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
                                          VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                                          false);
            }

            // If we have any sampled images set their layout now.
            for (int j = 0; j < cbInfo.fSampledImages.count(); ++j) {
                cbInfo.fSampledImages[j]->setImageLayout(fGpu,
                                                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                                         VK_ACCESS_SHADER_READ_BIT,
                                                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                                         false);
            }

            SkIRect iBounds;
            cbInfo.fBounds.roundOut(&iBounds);

            fGpu->submitSecondaryCommandBuffer(cbInfo.fCommandBuffers, cbInfo.fRenderPass,
                                               &cbInfo.fColorClearValue, vkRT, fOrigin, iBounds);
        }
    }
}
234
// Binds this (recycled) command buffer to a render target: records the clear
// color, converts the Ganesh load/store ops to their Vulkan equivalents, and
// creates the first render pass / secondary command buffer via init().
// Must only be called on a freshly-constructed or reset() instance.
void GrVkGpuRTCommandBuffer::set(GrRenderTarget* rt, GrSurfaceOrigin origin,
                                 const GrGpuRTCommandBuffer::LoadAndStoreInfo& colorInfo,
                                 const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo& stencilInfo) {
    SkASSERT(!fRenderTarget);
    SkASSERT(fCommandBufferInfos.empty());
    SkASSERT(-1 == fCurrentCmdInfo);
    SkASSERT(fGpu == rt->getContext()->contextPriv().getGpu());
    SkASSERT(!fLastPipelineState);

    this->INHERITED::set(rt, origin);

    fClearColor = colorInfo.fClearColor;

    get_vk_load_store_ops(colorInfo.fLoadOp, colorInfo.fStoreOp,
                          &fVkColorLoadOp, &fVkColorStoreOp);

    get_vk_load_store_ops(stencilInfo.fLoadOp, stencilInfo.fStoreOp,
                          &fVkStencilLoadOp, &fVkStencilStoreOp);

    this->init();
}
256
257void GrVkGpuRTCommandBuffer::reset() {
258 for (int i = 0; i < fCommandBufferInfos.count(); ++i) {
259 CommandBufferInfo& cbInfo = fCommandBufferInfos[i];
260 for (int j = 0; j < cbInfo.fCommandBuffers.count(); ++j) {
261 cbInfo.fCommandBuffers[j]->unref(fGpu);
262 }
263 cbInfo.fRenderPass->unref(fGpu);
264 }
265 fCommandBufferInfos.reset();
266
267 fCurrentCmdInfo = -1;
268
269 fLastPipelineState = nullptr;
270 fRenderTarget = nullptr;
271}
272
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400273////////////////////////////////////////////////////////////////////////////////
274
// Marks the current render pass as starting with undefined contents. Only has
// an effect while the pass is still empty: it swaps in a compatible render
// pass whose color & stencil load ops are DONT_CARE and drops any pending
// pre-copies (their results would be discarded anyway).
void GrVkGpuRTCommandBuffer::discard() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    if (cbInfo.fIsEmpty) {
        // Change the render pass to do a don't-care load for both color & stencil
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                                                  VK_ATTACHMENT_STORE_OP_STORE);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        // The replacement pass must be compatible so already-recorded secondary
        // buffers remain valid against it.
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithDiscard;
        // If we are going to discard the whole render target then the results of any copies we did
        // immediately before to the target won't matter, so just drop them.
        cbInfo.fPreCopies.reset();
    }
}
309
// Intentionally a no-op: event markers are not emitted on the Vulkan backend.
void GrVkGpuRTCommandBuffer::insertEventMarker(const char* msg) {
    // TODO: does Vulkan have a correlate? (e.g. VK_EXT_debug_utils labels)
}
313
// Clears the stencil buffer inside the active render pass via
// vkCmdClearAttachments, restricted to the clip's scissor (flipped for
// bottom-left origins). insideStencilMask selects the high "clip" bit vs 0.
void GrVkGpuRTCommandBuffer::onClearStencilClip(const GrFixedClip& clip, bool insideStencilMask) {
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    GrStencilAttachment* sb = fRenderTarget->renderTargetPriv().getStencilAttachment();
    // this should only be called internally when we know we have a
    // stencil buffer.
    SkASSERT(sb);
    int stencilBitCount = sb->bits();

    // The contract with the callers does not guarantee that we preserve all bits in the stencil
    // during this clear. Thus we will clear the entire stencil to the desired value.

    VkClearDepthStencilValue vkStencilColor;
    memset(&vkStencilColor, 0, sizeof(VkClearDepthStencilValue));
    if (insideStencilMask) {
        // Set only the topmost stencil bit (the clip bit).
        vkStencilColor.stencil = (1 << (stencilBitCount - 1));
    } else {
        vkStencilColor.stencil = 0;
    }

    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        // Bottom-left origin: flip the scissor vertically into Vulkan's top-left space.
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }

    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };

    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t stencilIndex;
    SkAssertResult(cbInfo.fRenderPass->stencilAttachmentIndex(&stencilIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    attachment.colorAttachment = 0; // this value shouldn't matter
    attachment.clearValue.depthStencil = vkStencilColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
}
373
// Clears the color attachment. Fast path: if nothing has been recorded yet and
// the clear is unscissored, fold the clear into the render pass's load op.
// Otherwise issue a scissored vkCmdClearAttachments inside the active pass.
void GrVkGpuRTCommandBuffer::onClear(const GrFixedClip& clip, const SkPMColor4f& color) {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    // parent class should never let us get here with no RT
    SkASSERT(!clip.hasWindowRectangles());

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    VkClearColorValue vkColor = {{color.fR, color.fG, color.fB, color.fA}};

    if (cbInfo.fIsEmpty && !clip.scissorEnabled()) {
        // Change the render pass to do a clear load
        GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_CLEAR,
                                                VK_ATTACHMENT_STORE_OP_STORE);
        // Preserve the stencil buffer's load & store settings
        GrVkRenderPass::LoadStoreOps vkStencilOps(fVkStencilLoadOp, fVkStencilStoreOp);

        const GrVkRenderPass* oldRP = cbInfo.fRenderPass;

        const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
                vkRT->compatibleRenderPassHandle();
        if (rpHandle.isValid()) {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        } else {
            cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                         vkColorOps,
                                                                         vkStencilOps);
        }

        // The replacement pass must be compatible so already-recorded secondary
        // buffers remain valid against it.
        SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
        oldRP->unref(fGpu);

        cbInfo.fColorClearValue.color = {{color.fR, color.fG, color.fB, color.fA}};
        cbInfo.fLoadStoreState = LoadStoreState::kStartsWithClear;
        // If we are going to clear the whole render target then the results of any copies we did
        // immediately before to the target won't matter, so just drop them.
        cbInfo.fPreCopies.reset();

        // Update command buffer bounds
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
        return;
    }

    // We always do a sub rect clear with clearAttachments since we are inside a render pass
    VkClearRect clearRect;
    // Flip rect if necessary
    SkIRect vkRect;
    if (!clip.scissorEnabled()) {
        vkRect.setXYWH(0, 0, fRenderTarget->width(), fRenderTarget->height());
    } else if (kBottomLeft_GrSurfaceOrigin != fOrigin) {
        vkRect = clip.scissorRect();
    } else {
        // Bottom-left origin: flip the scissor vertically into Vulkan's top-left space.
        const SkIRect& scissor = clip.scissorRect();
        vkRect.setLTRB(scissor.fLeft, fRenderTarget->height() - scissor.fBottom,
                       scissor.fRight, fRenderTarget->height() - scissor.fTop);
    }
    clearRect.rect.offset = { vkRect.fLeft, vkRect.fTop };
    clearRect.rect.extent = { (uint32_t)vkRect.width(), (uint32_t)vkRect.height() };
    clearRect.baseArrayLayer = 0;
    clearRect.layerCount = 1;

    uint32_t colorIndex;
    SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&colorIndex));

    VkClearAttachment attachment;
    attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    attachment.colorAttachment = colorIndex;
    attachment.clearValue.color = vkColor;

    cbInfo.currentCmdBuf()->clearAttachments(fGpu, 1, &attachment, 1, &clearRect);
    cbInfo.fIsEmpty = false;

    // Update command buffer bounds
    if (!clip.scissorEnabled()) {
        cbInfo.fBounds.join(fRenderTarget->getBoundsRect());
    } else {
        cbInfo.fBounds.join(SkRect::Make(clip.scissorRect()));
    }
    return;
}
456
Greg Daniel500d58b2017-08-24 15:59:33 -0400457////////////////////////////////////////////////////////////////////////////////
458
// Ends the current secondary command buffer and begins a fresh one inside the
// SAME render pass (contrast with addAdditionalRenderPass, which starts a new
// pass).
void GrVkGpuRTCommandBuffer::addAdditionalCommandBuffer() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    cbInfo.currentCmdBuf()->end(fGpu);
    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
467
// Ends the current render pass and starts a new CommandBufferInfo whose pass
// LOADs and STOREs both color and stencil, so previously-rendered contents
// survive across the pass boundary. Advances fCurrentCmdInfo.
// NOTE: push_back on fCommandBufferInfos may reallocate — callers holding
// references into the array must re-fetch after calling this.
void GrVkGpuRTCommandBuffer::addAdditionalRenderPass() {
    GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);

    fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf()->end(fGpu);

    CommandBufferInfo& cbInfo = fCommandBufferInfos.push_back();
    fCurrentCmdInfo++;

    GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                            VK_ATTACHMENT_STORE_OP_STORE);
    GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
                                              VK_ATTACHMENT_STORE_OP_STORE);

    const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
            vkRT->compatibleRenderPassHandle();
    if (rpHandle.isValid()) {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    } else {
        cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
                                                                     vkColorOps,
                                                                     vkStencilOps);
    }
    cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;

    cbInfo.fCommandBuffers.push_back(fGpu->resourceProvider().findOrCreateSecondaryCommandBuffer());
    // It shouldn't matter what we set the clear color to here since we will assume loading of the
    // attachment.
    memset(&cbInfo.fColorClearValue, 0, sizeof(VkClearValue));
    cbInfo.fBounds.setEmpty();

    cbInfo.currentCmdBuf()->begin(fGpu, vkRT->framebuffer(), cbInfo.fRenderPass);
}
502
Brian Salomon943ed792017-10-30 09:37:55 -0400503void GrVkGpuRTCommandBuffer::inlineUpload(GrOpFlushState* state,
504 GrDeferredTextureUploadFn& upload) {
Greg Daniel22bc8652017-03-22 15:45:43 -0400505 if (!fCommandBufferInfos[fCurrentCmdInfo].fIsEmpty) {
506 this->addAdditionalRenderPass();
Greg Daniel77b53f62016-10-18 11:48:51 -0400507 }
Greg Daniel22bc8652017-03-22 15:45:43 -0400508 fCommandBufferInfos[fCurrentCmdInfo].fPreDrawUploads.emplace_back(state, upload);
Greg Daniel77b53f62016-10-18 11:48:51 -0400509}
510
Robert Phillipsb0e93a22017-08-29 08:26:54 -0400511void GrVkGpuRTCommandBuffer::copy(GrSurface* src, GrSurfaceOrigin srcOrigin, const SkIRect& srcRect,
Greg Daniel500d58b2017-08-24 15:59:33 -0400512 const SkIPoint& dstPoint) {
Greg Daniela3c68df2018-03-16 13:46:53 -0400513 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
514 if (!cbInfo.fIsEmpty || LoadStoreState::kStartsWithClear == cbInfo.fLoadStoreState) {
Greg Daniel500d58b2017-08-24 15:59:33 -0400515 this->addAdditionalRenderPass();
516 }
Greg Daniela3c68df2018-03-16 13:46:53 -0400517
Greg Daniel55fa6472018-03-16 16:13:10 -0400518 fCommandBufferInfos[fCurrentCmdInfo].fPreCopies.emplace_back(
519 src, srcOrigin, srcRect, dstPoint,
520 LoadStoreState::kStartsWithDiscard == cbInfo.fLoadStoreState);
521
Greg Daniela3c68df2018-03-16 13:46:53 -0400522 if (LoadStoreState::kLoadAndStore != cbInfo.fLoadStoreState) {
523 // Change the render pass to do a load and store so we don't lose the results of our copy
524 GrVkRenderPass::LoadStoreOps vkColorOps(VK_ATTACHMENT_LOAD_OP_LOAD,
525 VK_ATTACHMENT_STORE_OP_STORE);
526 GrVkRenderPass::LoadStoreOps vkStencilOps(VK_ATTACHMENT_LOAD_OP_LOAD,
527 VK_ATTACHMENT_STORE_OP_STORE);
528
529 const GrVkRenderPass* oldRP = cbInfo.fRenderPass;
530
531 GrVkRenderTarget* vkRT = static_cast<GrVkRenderTarget*>(fRenderTarget);
532 const GrVkResourceProvider::CompatibleRPHandle& rpHandle =
533 vkRT->compatibleRenderPassHandle();
534 if (rpHandle.isValid()) {
535 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(rpHandle,
536 vkColorOps,
537 vkStencilOps);
538 } else {
539 cbInfo.fRenderPass = fGpu->resourceProvider().findRenderPass(*vkRT,
540 vkColorOps,
541 vkStencilOps);
542 }
543 SkASSERT(cbInfo.fRenderPass->isCompatible(*oldRP));
544 oldRP->unref(fGpu);
545
546 cbInfo.fLoadStoreState = LoadStoreState::kLoadAndStore;
547
548 }
Greg Daniel500d58b2017-08-24 15:59:33 -0400549}
550
egdaniel9cb63402016-06-23 08:37:05 -0700551////////////////////////////////////////////////////////////////////////////////
552
Brian Salomon802cb312018-06-08 18:05:20 -0400553void GrVkGpuRTCommandBuffer::bindGeometry(const GrBuffer* indexBuffer,
Greg Daniel500d58b2017-08-24 15:59:33 -0400554 const GrBuffer* vertexBuffer,
555 const GrBuffer* instanceBuffer) {
Chris Daltonff926502017-05-03 14:36:54 -0400556 GrVkSecondaryCommandBuffer* currCmdBuf = fCommandBufferInfos[fCurrentCmdInfo].currentCmdBuf();
egdaniel9cb63402016-06-23 08:37:05 -0700557 // There is no need to put any memory barriers to make sure host writes have finished here.
558 // When a command buffer is submitted to a queue, there is an implicit memory barrier that
559 // occurs for all host writes. Additionally, BufferMemoryBarriers are not allowed inside of
560 // an active RenderPass.
egdaniel9cb63402016-06-23 08:37:05 -0700561
Chris Dalton1d616352017-05-31 12:51:23 -0600562 // Here our vertex and instance inputs need to match the same 0-based bindings they were
563 // assigned in GrVkPipeline. That is, vertex first (if any) followed by instance.
564 uint32_t binding = 0;
565
Brian Salomon802cb312018-06-08 18:05:20 -0400566 if (vertexBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600567 SkASSERT(vertexBuffer);
568 SkASSERT(!vertexBuffer->isCPUBacked());
569 SkASSERT(!vertexBuffer->isMapped());
570
571 currCmdBuf->bindInputBuffer(fGpu, binding++,
572 static_cast<const GrVkVertexBuffer*>(vertexBuffer));
573 }
574
Brian Salomon802cb312018-06-08 18:05:20 -0400575 if (instanceBuffer) {
Chris Dalton1d616352017-05-31 12:51:23 -0600576 SkASSERT(instanceBuffer);
577 SkASSERT(!instanceBuffer->isCPUBacked());
578 SkASSERT(!instanceBuffer->isMapped());
579
580 currCmdBuf->bindInputBuffer(fGpu, binding++,
581 static_cast<const GrVkVertexBuffer*>(instanceBuffer));
582 }
egdaniel9cb63402016-06-23 08:37:05 -0700583
Chris Daltonff926502017-05-03 14:36:54 -0400584 if (indexBuffer) {
585 SkASSERT(indexBuffer);
586 SkASSERT(!indexBuffer->isMapped());
587 SkASSERT(!indexBuffer->isCPUBacked());
egdaniel9cb63402016-06-23 08:37:05 -0700588
Chris Daltonff926502017-05-03 14:36:54 -0400589 currCmdBuf->bindIndexBuffer(fGpu, static_cast<const GrVkIndexBuffer*>(indexBuffer));
egdaniel9cb63402016-06-23 08:37:05 -0700590 }
591}
592
// Looks up (or creates) a GrVkPipelineState compatible with the current render pass, binds the
// pipeline and its uniforms, binds fixed primitive-processor textures when no per-mesh textures
// are supplied, and sets the dynamic scissor/viewport/blend-constant state.
// Returns null if a compatible pipeline state could not be created.
GrVkPipelineState* GrVkGpuRTCommandBuffer::prepareDrawState(
        const GrPrimitiveProcessor& primProc,
        const GrPipeline& pipeline,
        const GrPipeline::FixedDynamicState* fixedDynamicState,
        const GrPipeline::DynamicStateArrays* dynamicStateArrays,
        GrPrimitiveType primitiveType) {
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
    SkASSERT(cbInfo.fRenderPass);

    // Pipelines are keyed on a render pass that is merely *compatible* with the one we record
    // into, so the actual load/store ops of cbInfo.fRenderPass don't matter here.
    VkRenderPass compatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();

    GrVkPipelineState* pipelineState =
        fGpu->resourceProvider().findOrCreateCompatiblePipelineState(pipeline,
                                                                     primProc,
                                                                     primitiveType,
                                                                     compatibleRenderPass);
    if (!pipelineState) {
        return pipelineState;
    }

    // Workaround: on drivers where switching pipelines within one secondary command buffer is
    // problematic (vkCaps().newCBOnPipelineChange()), start a fresh command buffer whenever the
    // bound pipeline actually changes and we've already recorded work.
    if (!cbInfo.fIsEmpty &&
        fLastPipelineState && fLastPipelineState != pipelineState &&
        fGpu->vkCaps().newCBOnPipelineChange()) {
        this->addAdditionalCommandBuffer();
    }
    fLastPipelineState = pipelineState;

    pipelineState->bindPipeline(fGpu, cbInfo.currentCmdBuf());

    pipelineState->setAndBindUniforms(fGpu, primProc, pipeline, cbInfo.currentCmdBuf());

    // Check whether we need to bind textures between each GrMesh. If not we can bind them all now.
    bool setTextures = !(dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures);
    if (setTextures) {
        const GrTextureProxy* const* primProcProxies = nullptr;
        if (fixedDynamicState) {
            primProcProxies = fixedDynamicState->fPrimitiveProcessorTextures;
        }
        pipelineState->setAndBindTextures(fGpu, primProc, pipeline, primProcProxies,
                                          cbInfo.currentCmdBuf());
    }

    GrRenderTarget* rt = pipeline.renderTarget();

    // Scissor: when disabled, set a full-target scissor (Vulkan always requires one since the
    // pipeline declares it dynamic). When enabled with a fixed rect, set it once here;
    // per-mesh dynamic rects are handled by the caller (onDraw).
    if (!pipeline.isScissorEnabled()) {
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(),
                                                 rt, pipeline.proxy()->origin(),
                                                 SkIRect::MakeWH(rt->width(), rt->height()));
    } else if (!dynamicStateArrays || !dynamicStateArrays->fScissorRects) {
        SkASSERT(fixedDynamicState);
        GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), rt,
                                                 pipeline.proxy()->origin(),
                                                 fixedDynamicState->fScissorRect);
    }
    GrVkPipeline::SetDynamicViewportState(fGpu, cbInfo.currentCmdBuf(), rt);
    GrVkPipeline::SetDynamicBlendConstantState(fGpu, cbInfo.currentCmdBuf(), rt->config(),
                                               pipeline.getXferProcessor());

    return pipelineState;
}
653
// Records draws for the given meshes. First prepares every texture that will be sampled
// (resolving render-target textures and regenerating dirty mipmaps must happen *before* we are
// inside the render pass), then builds/binds pipeline state and emits one draw per mesh,
// re-preparing state when the primitive type changes and re-binding per-mesh scissor/textures
// when dynamic state arrays are supplied.
void GrVkGpuRTCommandBuffer::onDraw(const GrPrimitiveProcessor& primProc,
                                    const GrPipeline& pipeline,
                                    const GrPipeline::FixedDynamicState* fixedDynamicState,
                                    const GrPipeline::DynamicStateArrays* dynamicStateArrays,
                                    const GrMesh meshes[],
                                    int meshCount,
                                    const SkRect& bounds) {
    SkASSERT(pipeline.renderTarget() == fRenderTarget);

    if (!meshCount) {
        return;
    }

    // NOTE(review): prepareDrawState() below may call addAdditionalCommandBuffer(), which
    // appends to fCommandBufferInfos and advances fCurrentCmdInfo; this reference then points
    // at the previous entry (and could dangle if the array reallocates). Confirm the dynamic
    // scissor/texture binds and fBounds/fIsEmpty updates are intended to land there.
    CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];

    // Gets a texture ready to be sampled inside this render pass: resolve it if it is also an
    // MSAA render target, regenerate mipmaps if they are dirty and will be sampled, and track
    // it so layout transitions can be applied when the command buffer is submitted.
    auto prepareSampledImage = [&](GrTexture* texture, GrSamplerState::Filter filter) {
        GrVkTexture* vkTexture = static_cast<GrVkTexture*>(texture);
        // We may need to resolve the texture first if it is also a render target
        GrVkRenderTarget* texRT = static_cast<GrVkRenderTarget*>(vkTexture->asRenderTarget());
        if (texRT) {
            fGpu->onResolveRenderTarget(texRT);
        }

        // Check if we need to regenerate any mip maps
        if (GrSamplerState::Filter::kMipMap == filter &&
            (vkTexture->width() != 1 || vkTexture->height() != 1)) {
            SkASSERT(vkTexture->texturePriv().mipMapped() == GrMipMapped::kYes);
            if (vkTexture->texturePriv().mipMapsAreDirty()) {
                fGpu->regenerateMipMapLevels(vkTexture);
            }
        }
        cbInfo.fSampledImages.push_back(vkTexture);
    };

    // Prepare primitive-processor textures: either a per-mesh array (meshCount * numSamplers
    // entries, mesh-major) or a single fixed set shared by all meshes.
    if (dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures) {
        for (int m = 0, i = 0; m < meshCount; ++m) {
            for (int s = 0; s < primProc.numTextureSamplers(); ++s, ++i) {
                auto texture = dynamicStateArrays->fPrimitiveProcessorTextures[i]->peekTexture();
                prepareSampledImage(texture, primProc.textureSampler(s).samplerState().filter());
            }
        }
    } else {
        for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
            auto texture = fixedDynamicState->fPrimitiveProcessorTextures[i]->peekTexture();
            prepareSampledImage(texture, primProc.textureSampler(i).samplerState().filter());
        }
    }
    // Prepare every texture sampled by the fragment-processor chain.
    GrFragmentProcessor::Iter iter(pipeline);
    while (const GrFragmentProcessor* fp = iter.next()) {
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const GrFragmentProcessor::TextureSampler& sampler = fp->textureSampler(i);
            prepareSampledImage(sampler.peekTexture(), sampler.samplerState().filter());
        }
    }
    // A dst-copy texture (for in-shader blending) is sampled too; it needs no resolve/mip work.
    if (GrTexture* dstTexture = pipeline.peekDstTexture()) {
        cbInfo.fSampledImages.push_back(static_cast<GrVkTexture*>(dstTexture));
    }

    GrPrimitiveType primitiveType = meshes[0].primitiveType();
    GrVkPipelineState* pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                              dynamicStateArrays, primitiveType);
    if (!pipelineState) {
        return;
    }

    bool dynamicScissor =
            pipeline.isScissorEnabled() && dynamicStateArrays && dynamicStateArrays->fScissorRects;
    bool dynamicTextures = dynamicStateArrays && dynamicStateArrays->fPrimitiveProcessorTextures;

    for (int i = 0; i < meshCount; ++i) {
        const GrMesh& mesh = meshes[i];
        // Vulkan bakes the primitive type into the pipeline, so a type change forces a new
        // pipeline state.
        if (mesh.primitiveType() != primitiveType) {
            SkDEBUGCODE(pipelineState = nullptr);
            primitiveType = mesh.primitiveType();
            pipelineState = this->prepareDrawState(primProc, pipeline, fixedDynamicState,
                                                   dynamicStateArrays, primitiveType);
            if (!pipelineState) {
                return;
            }
        }

        if (dynamicScissor) {
            GrVkPipeline::SetDynamicScissorRectState(fGpu, cbInfo.currentCmdBuf(), fRenderTarget,
                                                     pipeline.proxy()->origin(),
                                                     dynamicStateArrays->fScissorRects[i]);
        }
        if (dynamicTextures) {
            // Per-mesh texture sets are laid out contiguously, numTextureSamplers() per mesh.
            GrTextureProxy* const* meshProxies = dynamicStateArrays->fPrimitiveProcessorTextures +
                                                 primProc.numTextureSamplers() * i;
            pipelineState->setAndBindTextures(fGpu, primProc, pipeline, meshProxies,
                                              cbInfo.currentCmdBuf());
        }
        SkASSERT(pipelineState);
        // Dispatches back into the sendInstancedMeshToGpu/sendIndexedInstancedMeshToGpu hooks.
        mesh.sendToGpu(this);
    }

    cbInfo.fBounds.join(bounds);
    cbInfo.fIsEmpty = false;
}
753
Brian Salomon802cb312018-06-08 18:05:20 -0400754void GrVkGpuRTCommandBuffer::sendInstancedMeshToGpu(GrPrimitiveType,
Greg Daniel500d58b2017-08-24 15:59:33 -0400755 const GrBuffer* vertexBuffer,
756 int vertexCount,
757 int baseVertex,
758 const GrBuffer* instanceBuffer,
759 int instanceCount,
760 int baseInstance) {
Chris Dalton114a3c02017-05-26 15:17:19 -0600761 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomon802cb312018-06-08 18:05:20 -0400762 this->bindGeometry(nullptr, vertexBuffer, instanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600763 cbInfo.currentCmdBuf()->draw(fGpu, vertexCount, instanceCount, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600764 fGpu->stats()->incNumDraws();
765}
766
Brian Salomon802cb312018-06-08 18:05:20 -0400767void GrVkGpuRTCommandBuffer::sendIndexedInstancedMeshToGpu(GrPrimitiveType,
Greg Daniel500d58b2017-08-24 15:59:33 -0400768 const GrBuffer* indexBuffer,
769 int indexCount,
770 int baseIndex,
771 const GrBuffer* vertexBuffer,
772 int baseVertex,
773 const GrBuffer* instanceBuffer,
774 int instanceCount,
Brian Salomon802cb312018-06-08 18:05:20 -0400775 int baseInstance,
776 GrPrimitiveRestart restart) {
777 SkASSERT(restart == GrPrimitiveRestart::kNo);
Chris Dalton114a3c02017-05-26 15:17:19 -0600778 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
Brian Salomon802cb312018-06-08 18:05:20 -0400779 this->bindGeometry(indexBuffer, vertexBuffer, instanceBuffer);
Chris Dalton1d616352017-05-31 12:51:23 -0600780 cbInfo.currentCmdBuf()->drawIndexed(fGpu, indexCount, instanceCount,
781 baseIndex, baseVertex, baseInstance);
Chris Dalton114a3c02017-05-26 15:17:19 -0600782 fGpu->stats()->incNumDraws();
783}
Greg Daniel64cc9aa2018-10-19 13:54:56 -0400784
785////////////////////////////////////////////////////////////////////////////////
786
787void GrVkGpuRTCommandBuffer::executeDrawable(std::unique_ptr<SkDrawable::GpuDrawHandler> drawable) {
788 GrVkRenderTarget* target = static_cast<GrVkRenderTarget*>(fRenderTarget);
789
790 GrVkImage* targetImage = target->msaaImage() ? target->msaaImage() : target;
791
792 CommandBufferInfo& cbInfo = fCommandBufferInfos[fCurrentCmdInfo];
793 VkRect2D bounds;
794 bounds.offset = { 0, 0 };
795 bounds.extent = { 0, 0 };
796
797 GrVkDrawableInfo vkInfo;
798 vkInfo.fSecondaryCommandBuffer = cbInfo.currentCmdBuf()->vkCommandBuffer();
799 vkInfo.fCompatibleRenderPass = cbInfo.fRenderPass->vkRenderPass();
800 SkAssertResult(cbInfo.fRenderPass->colorAttachmentIndex(&vkInfo.fImageAttachmentIndex));
801 vkInfo.fFormat = targetImage->imageFormat();
802 vkInfo.fDrawBounds = &bounds;
803
804 GrBackendDrawableInfo info(vkInfo);
805
806 drawable->draw(info);
807 fGpu->addDrawable(std::move(drawable));
808
809 if (bounds.extent.width == 0 || bounds.extent.height == 0) {
810 cbInfo.fBounds.join(target->getBoundsRect());
811 } else {
812 cbInfo.fBounds.join(SkRect::MakeXYWH(bounds.offset.x, bounds.offset.y,
813 bounds.extent.width, bounds.extent.height));
814 }
815}
816