/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrRenderTargetOpList.h"

#include "GrAppliedClip.h"
#include "GrAuditTrail.h"
#include "GrCaps.h"
#include "GrRenderTargetContext.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrPath.h"
#include "GrPipeline.h"
#include "GrMemoryPool.h"
#include "GrPipelineBuilder.h"
#include "GrRenderTarget.h"
#include "GrResourceProvider.h"
#include "GrRenderTargetPriv.h"
#include "GrStencilAttachment.h"
#include "GrSurfacePriv.h"
#include "GrTexture.h"
#include "gl/GrGLRenderTarget.h"

#include "SkStrokeRec.h"

#include "batches/GrClearBatch.h"
#include "batches/GrClearStencilClipBatch.h"
#include "batches/GrCopySurfaceBatch.h"
#include "batches/GrDiscardBatch.h"
#include "batches/GrDrawBatch.h"
#include "batches/GrDrawPathBatch.h"
#include "batches/GrRectBatchFactory.h"
#include "batches/GrStencilPathBatch.h"

#include "instanced/InstancedRendering.h"

using gr_instanced::InstancedRendering;

////////////////////////////////////////////////////////////////////////////////

// Experimentally we have found that most batching occurs within the first 10 comparisons.
static const int kDefaultMaxBatchLookback = 10;
static const int kDefaultMaxBatchLookahead = 10;

GrRenderTargetOpList::GrRenderTargetOpList(GrRenderTargetProxy* rtp, GrGpu* gpu,
                                           GrResourceProvider* resourceProvider,
                                           GrAuditTrail* auditTrail, const Options& options)
    : INHERITED(rtp, auditTrail)
    , fLastFullClearBatch(nullptr)
    , fGpu(SkRef(gpu))
    , fResourceProvider(resourceProvider)
    , fLastClipStackGenID(SK_InvalidUniqueID) {
    // TODO: Stop extracting the context (currently needed by GrClip)
    fContext = fGpu->getContext();

    fClipBatchToBounds = options.fClipBatchToBounds;
    fDrawBatchBounds = options.fDrawBatchBounds;
    fMaxBatchLookback = (options.fMaxBatchLookback < 0) ? kDefaultMaxBatchLookback :
                                                          options.fMaxBatchLookback;
    fMaxBatchLookahead = (options.fMaxBatchLookahead < 0) ? kDefaultMaxBatchLookahead :
                                                            options.fMaxBatchLookahead;

    if (GrCaps::InstancedSupport::kNone != this->caps()->instancedSupport()) {
        fInstancedRendering.reset(fGpu->createInstancedRendering());
    }
}

GrRenderTargetOpList::~GrRenderTargetOpList() {
    fGpu->unref();
}

////////////////////////////////////////////////////////////////////////////////

#ifdef SK_DEBUG
void GrRenderTargetOpList::dump() const {
    INHERITED::dump();

    SkDebugf("batches (%d):\n", fRecordedBatches.count());
    for (int i = 0; i < fRecordedBatches.count(); ++i) {
        SkDebugf("*******************************\n");
        if (!fRecordedBatches[i].fBatch) {
            SkDebugf("%d: <combined forward>\n", i);
        } else {
            SkDebugf("%d: %s\n", i, fRecordedBatches[i].fBatch->name());
            SkString str = fRecordedBatches[i].fBatch->dumpInfo();
            SkDebugf("%s\n", str.c_str());
            const SkRect& clippedBounds = fRecordedBatches[i].fClippedBounds;
            SkDebugf("ClippedBounds: [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n",
                     clippedBounds.fLeft, clippedBounds.fTop, clippedBounds.fRight,
                     clippedBounds.fBottom);
        }
    }
}
#endif
99
Robert Phillipsf2361d22016-10-25 14:20:06 -0400100bool GrRenderTargetOpList::setupDstReadIfNecessary(const GrPipelineBuilder& pipelineBuilder,
101 GrRenderTarget* rt,
102 const GrClip& clip,
103 const GrPipelineOptimizations& optimizations,
104 GrXferProcessor::DstTexture* dstTexture,
105 const SkRect& batchBounds) {
bsalomonad792c12015-09-10 11:10:50 -0700106 SkRect bounds = batchBounds;
107 bounds.outset(0.5f, 0.5f);
108
ethannicholasde4166a2015-11-30 08:57:38 -0800109 if (!pipelineBuilder.willXPNeedDstTexture(*this->caps(), optimizations)) {
bsalomon@google.com26e18b52013-03-29 19:22:36 +0000110 return true;
111 }
cdalton9954bc32015-04-29 14:17:00 -0700112
cdalton9954bc32015-04-29 14:17:00 -0700113 if (this->caps()->textureBarrierSupport()) {
114 if (GrTexture* rtTex = rt->asTexture()) {
            // The render target is a texture, so we can read from it directly in the shader. The
            // XP is responsible for detecting this situation and requesting a texture barrier.
            dstTexture->setTexture(sk_ref_sp(rtTex));
            dstTexture->setOffset(0, 0);
            return true;
        }
    }

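    // Bound the region that must be copied: start from the clip's conservative device-space
    // bounds and, below, intersect them with the (outset) draw bounds so that no more of the dst
    // is copied than the draw can actually read.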
    SkIRect copyRect;
    clip.getConservativeBounds(rt->width(), rt->height(), &copyRect);

    SkIRect drawIBounds;
    bounds.roundOut(&drawIBounds);
    if (!copyRect.intersect(drawIBounds)) {
#ifdef SK_DEBUG
        GrCapsDebugf(this->caps(), "Missed an early reject. "
                                   "Bailing on draw from setupDstReadIfNecessary.\n");
#endif
        return false;
    }

    // MSAA consideration: When there is support for reading MSAA samples in the shader we could
    // have per-sample dst values by making the copy multisampled.
    GrSurfaceDesc desc;
    if (!fGpu->initDescForDstCopy(rt, &desc)) {
        desc.fOrigin = kDefault_GrSurfaceOrigin;
        desc.fFlags = kRenderTarget_GrSurfaceFlag;
        desc.fConfig = rt->config();
    }

    desc.fWidth = copyRect.width();
    desc.fHeight = copyRect.height();

    static const uint32_t kFlags = 0;
    sk_sp<GrTexture> copy(fResourceProvider->createApproxTexture(desc, kFlags));

    if (!copy) {
        SkDebugf("Failed to create temporary copy of destination texture.\n");
        return false;
    }
    SkIPoint dstPoint = {0, 0};
    this->copySurface(copy.get(), rt, copyRect, dstPoint);
    dstTexture->setTexture(std::move(copy));
    dstTexture->setOffset(copyRect.fLeft, copyRect.fTop);
    return true;
}

void GrRenderTargetOpList::prepareBatches(GrBatchFlushState* flushState) {
    // Usually the GrOpLists are already closed at this point, but sometimes Ganesh needs to
    // flush mid-draw. In that case, the SkGpuDevice's GrOpLists won't be closed but need to be
    // flushed anyway. Closing such GrOpLists here will mean new GrOpLists will be created to
    // replace them if the SkGpuDevice(s) write to them again.
    this->makeClosed();

    // Loop over the batches that haven't yet generated their geometry
    for (int i = 0; i < fRecordedBatches.count(); ++i) {
        if (fRecordedBatches[i].fBatch) {
            fRecordedBatches[i].fBatch->prepare(flushState);
        }
    }

    if (fInstancedRendering) {
        fInstancedRendering->beginFlush(flushState->resourceProvider());
    }
}

bool GrRenderTargetOpList::drawBatches(GrBatchFlushState* flushState) {
    if (0 == fRecordedBatches.count()) {
        return false;
    }
    // Draw all the generated geometry.
    SkRandom random;
    GrRenderTarget* currentRT = nullptr;
    std::unique_ptr<GrGpuCommandBuffer> commandBuffer;
    for (int i = 0; i < fRecordedBatches.count(); ++i) {
        if (!fRecordedBatches[i].fBatch) {
            continue;
        }
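        // If this batch targets a different render target than the command buffer in flight,
        // end and submit that command buffer, then open a new one for the new target.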
        if (fRecordedBatches[i].fBatch->renderTarget() != currentRT) {
            if (commandBuffer) {
                commandBuffer->end();
                commandBuffer->submit();
                commandBuffer.reset();
            }
            currentRT = fRecordedBatches[i].fBatch->renderTarget();
            if (currentRT) {
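                // Basic load/store state for the pass; GrColor_ILLEGAL is just a placeholder,
                // since no clear color is needed when the load op is kLoad rather than kClear.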
201 static const GrGpuCommandBuffer::LoadAndStoreInfo kBasicLoadStoreInfo
202 { GrGpuCommandBuffer::LoadOp::kLoad,GrGpuCommandBuffer::StoreOp::kStore,
203 GrColor_ILLEGAL };
204 commandBuffer.reset(fGpu->createCommandBuffer(currentRT,
205 kBasicLoadStoreInfo, // Color
206 kBasicLoadStoreInfo)); // Stencil
207 }
Ben Wagner145dbcd2016-11-03 14:40:50 -0400208 flushState->setCommandBuffer(commandBuffer.get());
egdaniel9cb63402016-06-23 08:37:05 -0700209 }
        if (fDrawBatchBounds) {
            const SkRect& bounds = fRecordedBatches[i].fClippedBounds;
            SkIRect ibounds;
            bounds.roundOut(&ibounds);
            // In a multi-draw buffer all the batches use the same render target and we won't need
            // to get the batch's bounds.
            if (GrRenderTarget* rt = fRecordedBatches[i].fBatch->renderTarget()) {
                fGpu->drawDebugWireRect(rt, ibounds, 0xFF000000 | random.nextU());
            }
        }
        fRecordedBatches[i].fBatch->draw(flushState, fRecordedBatches[i].fClippedBounds);
    }
    if (commandBuffer) {
        commandBuffer->end();
        commandBuffer->submit();
        flushState->setCommandBuffer(nullptr);
    }

    fGpu->finishOpList();
    return true;
}

void GrRenderTargetOpList::reset() {
    fLastFullClearBatch = nullptr;
    fRecordedBatches.reset();
    if (fInstancedRendering) {
        fInstancedRendering->endFlush();
    }
}

void GrRenderTargetOpList::abandonGpuResources() {
    if (GrCaps::InstancedSupport::kNone != fContext->caps()->instancedSupport()) {
        InstancedRendering* ir = this->instancedRendering();
        ir->resetGpuResources(InstancedRendering::ResetType::kAbandon);
    }
}

void GrRenderTargetOpList::freeGpuResources() {
    if (GrCaps::InstancedSupport::kNone != fContext->caps()->instancedSupport()) {
        InstancedRendering* ir = this->instancedRendering();
        ir->resetGpuResources(InstancedRendering::ResetType::kDestroy);
    }
}

static void batch_bounds(SkRect* bounds, const GrBatch* batch) {
    *bounds = batch->bounds();
    if (batch->hasZeroArea()) {
        if (batch->hasAABloat()) {
            bounds->outset(0.5f, 0.5f);
        } else {
            // We don't know which way the particular GPU will snap lines or points at integer
            // coords. So we ensure that the bounds is large enough for either snap.
            SkRect before = *bounds;
            bounds->roundOut(bounds);
            if (bounds->fLeft == before.fLeft) {
                bounds->fLeft -= 1;
            }
            if (bounds->fTop == before.fTop) {
                bounds->fTop -= 1;
            }
            if (bounds->fRight == before.fRight) {
                bounds->fRight += 1;
            }
            if (bounds->fBottom == before.fBottom) {
                bounds->fBottom += 1;
            }
        }
    }
}

void GrRenderTargetOpList::drawBatch(const GrPipelineBuilder& pipelineBuilder,
                                     GrRenderTargetContext* renderTargetContext,
                                     const GrClip& clip,
                                     GrDrawBatch* batch) {
    // Setup clip
    SkRect bounds;
    batch_bounds(&bounds, batch);
    GrAppliedClip appliedClip(bounds);
    if (!clip.apply(fContext, renderTargetContext, pipelineBuilder.isHWAntialias(),
                    pipelineBuilder.hasUserStencilSettings(), &appliedClip)) {
        return;
    }

    // TODO: this is the only remaining usage of the AutoRestoreFragmentProcessorState - remove it
    GrPipelineBuilder::AutoRestoreFragmentProcessorState arfps;
    if (appliedClip.clipCoverageFragmentProcessor()) {
        arfps.set(&pipelineBuilder);
        arfps.addCoverageFragmentProcessor(sk_ref_sp(appliedClip.clipCoverageFragmentProcessor()));
    }

    if (pipelineBuilder.hasUserStencilSettings() || appliedClip.hasStencilClip()) {
        if (!fResourceProvider->attachStencilAttachment(
                renderTargetContext->accessRenderTarget())) {
            SkDebugf("ERROR creating stencil attachment. Draw skipped.\n");
            return;
        }
    }

    GrPipeline::CreateArgs args;
    args.fPipelineBuilder = &pipelineBuilder;
    args.fRenderTargetContext = renderTargetContext;
    args.fCaps = this->caps();
    batch->getPipelineOptimizations(&args.fOpts);
    if (args.fOpts.fOverrides.fUsePLSDstRead || fClipBatchToBounds) {
        GrGLIRect viewport;
        viewport.fLeft = 0;
        viewport.fBottom = 0;
        viewport.fWidth = renderTargetContext->width();
        viewport.fHeight = renderTargetContext->height();
        SkIRect ibounds;
        ibounds.fLeft = SkTPin(SkScalarFloorToInt(batch->bounds().fLeft), viewport.fLeft,
                               viewport.fWidth);
        ibounds.fTop = SkTPin(SkScalarFloorToInt(batch->bounds().fTop), viewport.fBottom,
                              viewport.fHeight);
        ibounds.fRight = SkTPin(SkScalarCeilToInt(batch->bounds().fRight), viewport.fLeft,
                                viewport.fWidth);
        ibounds.fBottom = SkTPin(SkScalarCeilToInt(batch->bounds().fBottom), viewport.fBottom,
                                 viewport.fHeight);
        if (!appliedClip.addScissor(ibounds)) {
            return;
        }
    }
    args.fOpts.fColorPOI.completeCalculations(
        sk_sp_address_as_pointer_address(pipelineBuilder.fColorFragmentProcessors.begin()),
        pipelineBuilder.numColorFragmentProcessors());
    args.fOpts.fCoveragePOI.completeCalculations(
        sk_sp_address_as_pointer_address(pipelineBuilder.fCoverageFragmentProcessors.begin()),
        pipelineBuilder.numCoverageFragmentProcessors());
    args.fScissor = &appliedClip.scissorState();
    args.fWindowRectsState = &appliedClip.windowRectsState();
    args.fHasStencilClip = appliedClip.hasStencilClip();
    if (!this->setupDstReadIfNecessary(pipelineBuilder, renderTargetContext->accessRenderTarget(),
                                       clip, args.fOpts,
                                       &args.fDstTexture, batch->bounds())) {
        return;
    }

    if (!batch->installPipeline(args)) {
        return;
    }

#ifdef ENABLE_MDB
    SkASSERT(fSurface);
    batch->pipeline()->addDependenciesTo(fSurface);
#endif
    this->recordBatch(batch, appliedClip.clippedDrawBounds());
}

void GrRenderTargetOpList::stencilPath(GrRenderTargetContext* renderTargetContext,
                                       const GrClip& clip,
                                       bool useHWAA,
                                       const SkMatrix& viewMatrix,
                                       const GrPath* path) {
    // TODO: extract portions of checkDraw that are relevant to path stenciling.
    SkASSERT(path);
    SkASSERT(this->caps()->shaderCaps()->pathRenderingSupport());

    // FIXME: Use path bounds instead of this WAR once
    // https://bugs.chromium.org/p/skia/issues/detail?id=5640 is resolved.
    SkRect bounds = SkRect::MakeIWH(renderTargetContext->width(), renderTargetContext->height());

    // Setup clip
    GrAppliedClip appliedClip(bounds);
    if (!clip.apply(fContext, renderTargetContext, useHWAA, true, &appliedClip)) {
        return;
    }
    // TODO: respect fClipBatchToBounds if we ever start computing bounds here.

    // Coverage AA does not make sense when rendering to the stencil buffer. The caller should
    // never attempt this in a situation that would require coverage AA.
    SkASSERT(!appliedClip.clipCoverageFragmentProcessor());

    GrStencilAttachment* stencilAttachment = fResourceProvider->attachStencilAttachment(
                                                 renderTargetContext->accessRenderTarget());
    if (!stencilAttachment) {
        SkDebugf("ERROR creating stencil attachment. Draw skipped.\n");
        return;
    }

    GrBatch* batch = GrStencilPathBatch::Create(viewMatrix,
                                                useHWAA,
                                                path->getFillType(),
                                                appliedClip.hasStencilClip(),
                                                stencilAttachment->bits(),
                                                appliedClip.scissorState(),
                                                renderTargetContext->accessRenderTarget(),
                                                path);
    this->recordBatch(batch, appliedClip.clippedDrawBounds());
    batch->unref();
}

void GrRenderTargetOpList::addBatch(sk_sp<GrBatch> batch) {
    this->recordBatch(batch.get(), batch->bounds());
}

void GrRenderTargetOpList::fullClear(GrRenderTarget* renderTarget, GrColor color) {
    // Currently this just inserts or updates the last clear batch. However, once in MDB this can
    // remove all the previously recorded batches and change the load op to clear with supplied
    // color.
    if (fLastFullClearBatch &&
        fLastFullClearBatch->renderTargetUniqueID() == renderTarget->uniqueID()) {
        // As currently implemented, fLastFullClearBatch should be the last batch because we would
        // have cleared it when another batch was recorded.
        SkASSERT(fRecordedBatches.back().fBatch.get() == fLastFullClearBatch);
        fLastFullClearBatch->setColor(color);
        return;
    }
    sk_sp<GrClearBatch> batch(GrClearBatch::Make(GrFixedClip::Disabled(), color, renderTarget));
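    // recordBatch() returns the batch that now carries the clear: either the new batch itself or
    // an earlier batch it was combined into. Only remember it as the last full clear in the
    // former case.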
    if (batch.get() == this->recordBatch(batch.get(), batch->bounds())) {
        fLastFullClearBatch = batch.get();
    }
}

void GrRenderTargetOpList::discard(GrRenderTarget* renderTarget) {
    // Currently this just inserts a discard batch. However, once in MDB this can remove all the
    // previously recorded batches and change the load op to discard.
    if (this->caps()->discardRenderTargetSupport()) {
        GrBatch* batch = new GrDiscardBatch(renderTarget);
        this->recordBatch(batch, batch->bounds());
        batch->unref();
    }
}

////////////////////////////////////////////////////////////////////////////////

bool GrRenderTargetOpList::copySurface(GrSurface* dst,
                                       GrSurface* src,
                                       const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    GrBatch* batch = GrCopySurfaceBatch::Create(dst, src, srcRect, dstPoint);
    if (!batch) {
        return false;
    }
#ifdef ENABLE_MDB
    this->addDependency(src);
#endif

    this->recordBatch(batch, batch->bounds());
    batch->unref();
    return true;
}

static inline bool can_reorder(const SkRect& a, const SkRect& b) {
    return a.fRight <= b.fLeft || a.fBottom <= b.fTop ||
           b.fRight <= a.fLeft || b.fBottom <= a.fTop;
}

static void join(SkRect* out, const SkRect& a, const SkRect& b) {
    SkASSERT(a.fLeft <= a.fRight && a.fTop <= a.fBottom);
    SkASSERT(b.fLeft <= b.fRight && b.fTop <= b.fBottom);
    out->fLeft = SkTMin(a.fLeft, b.fLeft);
    out->fTop = SkTMin(a.fTop, b.fTop);
    out->fRight = SkTMax(a.fRight, b.fRight);
    out->fBottom = SkTMax(a.fBottom, b.fBottom);
}

GrBatch* GrRenderTargetOpList::recordBatch(GrBatch* batch, const SkRect& clippedBounds) {
    // A closed GrOpList should never receive new/more batches
    SkASSERT(!this->isClosed());

    // Check if there is a batch we can combine with by linearly searching back until we either
    // 1) check every draw
    // 2) intersect with something
    // 3) find a 'blocker'
    GR_AUDIT_TRAIL_ADDBATCH(fAuditTrail, batch);
    GrBATCH_INFO("Re-Recording (%s, B%u)\n"
                 "\tBounds LRTB (%f, %f, %f, %f)\n",
                 batch->name(),
                 batch->uniqueID(),
                 batch->bounds().fLeft, batch->bounds().fRight,
                 batch->bounds().fTop, batch->bounds().fBottom);
    GrBATCH_INFO(SkTabString(batch->dumpInfo(), 1).c_str());
    GrBATCH_INFO("\tClipped Bounds: [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n",
                 clippedBounds.fLeft, clippedBounds.fTop, clippedBounds.fRight,
                 clippedBounds.fBottom);
    GrBATCH_INFO("\tOutcome:\n");
    int maxCandidates = SkTMin(fMaxBatchLookback, fRecordedBatches.count());
    if (maxCandidates) {
        int i = 0;
        while (true) {
            GrBatch* candidate = fRecordedBatches.fromBack(i).fBatch.get();
            // We cannot continue to search backwards if the render target changes
            if (candidate->renderTargetUniqueID() != batch->renderTargetUniqueID()) {
                GrBATCH_INFO("\t\tBreaking because of (%s, B%u) Rendertarget\n",
                             candidate->name(), candidate->uniqueID());
                break;
            }
            if (candidate->combineIfPossible(batch, *this->caps())) {
                GrBATCH_INFO("\t\tCombining with (%s, B%u)\n", candidate->name(),
                             candidate->uniqueID());
                GR_AUDIT_TRAIL_BATCHING_RESULT_COMBINED(fAuditTrail, candidate, batch);
                join(&fRecordedBatches.fromBack(i).fClippedBounds,
                     fRecordedBatches.fromBack(i).fClippedBounds, clippedBounds);
                return candidate;
            }
            // Stop going backwards if we would cause a painter's order violation.
            const SkRect& candidateBounds = fRecordedBatches.fromBack(i).fClippedBounds;
            if (!can_reorder(candidateBounds, clippedBounds)) {
                GrBATCH_INFO("\t\tIntersects with (%s, B%u)\n", candidate->name(),
                             candidate->uniqueID());
                break;
            }
            ++i;
            if (i == maxCandidates) {
                GrBATCH_INFO("\t\tReached max lookback or beginning of batch array %d\n", i);
                break;
            }
        }
    } else {
        GrBATCH_INFO("\t\tFirstBatch\n");
    }
    GR_AUDIT_TRAIL_BATCHING_RESULT_NEW(fAuditTrail, batch);
    fRecordedBatches.emplace_back(RecordedBatch{sk_ref_sp(batch), clippedBounds});
    fLastFullClearBatch = nullptr;
    return batch;
}

void GrRenderTargetOpList::forwardCombine() {
    if (fMaxBatchLookahead <= 0) {
        return;
    }
    for (int i = 0; i < fRecordedBatches.count() - 2; ++i) {
        GrBatch* batch = fRecordedBatches[i].fBatch.get();
        const SkRect& batchBounds = fRecordedBatches[i].fClippedBounds;
        int maxCandidateIdx = SkTMin(i + fMaxBatchLookahead, fRecordedBatches.count() - 1);
        int j = i + 1;
        while (true) {
            GrBatch* candidate = fRecordedBatches[j].fBatch.get();
            // We cannot continue to search if the render target changes
            if (candidate->renderTargetUniqueID() != batch->renderTargetUniqueID()) {
                GrBATCH_INFO("\t\tBreaking because of (%s, B%u) Rendertarget\n",
                             candidate->name(), candidate->uniqueID());
                break;
            }
            if (j == i + 1) {
                // We assume the batch would already have combined with the candidate when the
                // candidate was added via backwards combining in recordBatch.
                SkASSERT(!batch->combineIfPossible(candidate, *this->caps()));
            } else if (batch->combineIfPossible(candidate, *this->caps())) {
                GrBATCH_INFO("\t\tCombining with (%s, B%u)\n", candidate->name(),
                             candidate->uniqueID());
                GR_AUDIT_TRAIL_BATCHING_RESULT_COMBINED(fAuditTrail, batch, candidate);
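                // Move the combined batch forward into slot j. Slot i is left holding a null
                // fBatch, which prepareBatches()/drawBatches() skip over and dump() reports as
                // "<combined forward>".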
                fRecordedBatches[j].fBatch = std::move(fRecordedBatches[i].fBatch);
                join(&fRecordedBatches[j].fClippedBounds, fRecordedBatches[j].fClippedBounds,
                     batchBounds);
                break;
            }
            // Stop traversing if we would cause a painter's order violation.
            const SkRect& candidateBounds = fRecordedBatches[j].fClippedBounds;
            if (!can_reorder(candidateBounds, batchBounds)) {
                GrBATCH_INFO("\t\tIntersects with (%s, B%u)\n", candidate->name(),
                             candidate->uniqueID());
                break;
            }
            ++j;
            if (j > maxCandidateIdx) {
                GrBATCH_INFO("\t\tReached max lookahead or end of batch array %d\n", i);
                break;
            }
        }
    }
}

///////////////////////////////////////////////////////////////////////////////

void GrRenderTargetOpList::clearStencilClip(const GrFixedClip& clip,
                                            bool insideStencilMask,
                                            GrRenderTarget* rt) {
    GrBatch* batch = new GrClearStencilClipBatch(clip, insideStencilMask, rt);
    this->recordBatch(batch, batch->bounds());
    batch->unref();
}