/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrRenderTargetOpList.h"
#include "GrAuditTrail.h"
#include "GrCaps.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrMemoryPool.h"
#include "GrRect.h"
#include "GrRenderTargetContext.h"
#include "GrResourceAllocator.h"
#include "ops/GrClearOp.h"
#include "ops/GrCopySurfaceOp.h"
#include "SkTraceEvent.h"


////////////////////////////////////////////////////////////////////////////////

// Experimentally we have found that most combining occurs within the first 10 comparisons.
static const int kMaxOpLookback = 10;
static const int kMaxOpLookahead = 10;

GrRenderTargetOpList::GrRenderTargetOpList(GrResourceProvider* resourceProvider,
                                           sk_sp<GrOpMemoryPool> opMemoryPool,
                                           GrRenderTargetProxy* proxy,
                                           GrAuditTrail* auditTrail)
        : INHERITED(resourceProvider, std::move(opMemoryPool), proxy, auditTrail)
        , fLastClipStackGenID(SK_InvalidUniqueID)
        SkDEBUGCODE(, fNumClips(0)) {
}

void GrRenderTargetOpList::RecordedOp::deleteOp(GrOpMemoryPool* opMemoryPool) {
    opMemoryPool->release(std::move(fOp));
}

void GrRenderTargetOpList::deleteOps() {
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (fRecordedOps[i].fOp) {
            fRecordedOps[i].deleteOp(fOpMemoryPool.get());
        }
    }
    fRecordedOps.reset();
}

GrRenderTargetOpList::~GrRenderTargetOpList() {
    this->deleteOps();
}

////////////////////////////////////////////////////////////////////////////////

#ifdef SK_DEBUG
void GrRenderTargetOpList::dump(bool printDependencies) const {
    INHERITED::dump(printDependencies);

    SkDebugf("ops (%d):\n", fRecordedOps.count());
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        SkDebugf("*******************************\n");
        if (!fRecordedOps[i].fOp) {
            SkDebugf("%d: <combined forward or failed instantiation>\n", i);
        } else {
            SkDebugf("%d: %s\n", i, fRecordedOps[i].fOp->name());
            SkString str = fRecordedOps[i].fOp->dumpInfo();
            SkDebugf("%s\n", str.c_str());
            const SkRect& bounds = fRecordedOps[i].fOp->bounds();
            SkDebugf("ClippedBounds: [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n", bounds.fLeft,
                     bounds.fTop, bounds.fRight, bounds.fBottom);
        }
    }
}

void GrRenderTargetOpList::visitProxies_debugOnly(const GrOp::VisitProxyFunc& func) const {
    for (const RecordedOp& recordedOp : fRecordedOps) {
        recordedOp.visitProxies(func, GrOp::VisitorType::kOther);
    }
}

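// Debug-only check that a chain head's bounds contain the bounds of every op linked behind it
// in the chain.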
static void assert_chain_bounds(const GrOp* op) {
    SkASSERT(op->isChainHead());
    auto headBounds = op->bounds();
    while ((op = op->nextInChain())) {
        SkASSERT(headBounds.contains(op->bounds()));
    }
}
#endif

void GrRenderTargetOpList::onPrepare(GrOpFlushState* flushState) {
    SkASSERT(fTarget.get()->peekRenderTarget());
    SkASSERT(this->isClosed());
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    TRACE_EVENT0("skia", TRACE_FUNC);
#endif

    // Loop over the ops that haven't yet been prepared.
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (fRecordedOps[i].fOp && fRecordedOps[i].fOp->isChainHead()) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
            TRACE_EVENT0("skia", fRecordedOps[i].fOp->name());
#endif
            GrOpFlushState::OpArgs opArgs = {
                fRecordedOps[i].fOp.get(),
                fTarget.get()->asRenderTargetProxy(),
                fRecordedOps[i].fAppliedClip,
                fRecordedOps[i].fDstProxy
            };
            SkDEBUGCODE(assert_chain_bounds(opArgs.fOp));
            flushState->setOpArgs(&opArgs);
            fRecordedOps[i].fOp->prepare(flushState);
            flushState->setOpArgs(nullptr);
        }
    }
}

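// Creates the GPU command buffer that this opList's ops will be executed into, using the
// requested color and stencil load ops (stores are always kStore at this level).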
static GrGpuRTCommandBuffer* create_command_buffer(GrGpu* gpu,
                                                   GrRenderTarget* rt,
                                                   GrSurfaceOrigin origin,
                                                   const SkRect& bounds,
                                                   GrLoadOp colorLoadOp,
                                                   const SkPMColor4f& loadClearColor,
                                                   GrLoadOp stencilLoadOp) {
    const GrGpuRTCommandBuffer::LoadAndStoreInfo kColorLoadStoreInfo {
        colorLoadOp,
        GrStoreOp::kStore,
        loadClearColor
    };

    // TODO:
    // We would like to (at this level) only ever clear & discard. We would need
    // to stop splitting up higher level opLists for copyOps to achieve that.
    // Note: we would still need SB loads and stores but they would happen at a
    // lower level (inside the VK command buffer).
    const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo stencilLoadAndStoreInfo {
        stencilLoadOp,
        GrStoreOp::kStore,
    };

    return gpu->getCommandBuffer(rt, origin, bounds, kColorLoadStoreInfo, stencilLoadAndStoreInfo);
}

// TODO: this is where GrOp::renderTarget is used (which is fine since it
// is at flush time). However, we need to store the RenderTargetProxy in the
// Ops and instantiate them here.
bool GrRenderTargetOpList::onExecute(GrOpFlushState* flushState) {
    // TODO: Forcing the execution of the discard here isn't ideal since it will cause us to do a
    // discard and then store the data back in memory so that the load op on future draws doesn't
    // think the memory is uninitialized. Ideally we would want a system where we are tracking
    // whether the proxy itself has valid data or not, and then use that as a signal on whether we
    // should be loading or discarding. In that world we wouldn't need to worry about executing
    // opLists with no ops just to do a discard.
    if (0 == fRecordedOps.count() && GrLoadOp::kClear != fColorLoadOp &&
        GrLoadOp::kDiscard != fColorLoadOp) {
        return false;
    }

    SkASSERT(fTarget.get()->peekRenderTarget());
    TRACE_EVENT0("skia", TRACE_FUNC);

    // TODO: at the very least, we want the stencil store op to always be discard (at this
    // level). In Vulkan, sub-command buffers would still need to load & store the stencil buffer.
    GrGpuRTCommandBuffer* commandBuffer = create_command_buffer(
                                                    flushState->gpu(),
                                                    fTarget.get()->peekRenderTarget(),
                                                    fTarget.get()->origin(),
                                                    fTarget.get()->getBoundsRect(),
                                                    fColorLoadOp,
                                                    fLoadClearColor,
                                                    fStencilLoadOp);
    flushState->setCommandBuffer(commandBuffer);
    commandBuffer->begin();

    // Draw all the generated geometry.
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (!fRecordedOps[i].fOp || !fRecordedOps[i].fOp->isChainHead()) {
            continue;
        }
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        TRACE_EVENT0("skia", fRecordedOps[i].fOp->name());
#endif

        GrOpFlushState::OpArgs opArgs {
            fRecordedOps[i].fOp.get(),
            fTarget.get()->asRenderTargetProxy(),
            fRecordedOps[i].fAppliedClip,
            fRecordedOps[i].fDstProxy
        };

        flushState->setOpArgs(&opArgs);
        fRecordedOps[i].fOp->execute(flushState);
        flushState->setOpArgs(nullptr);
    }

    commandBuffer->end();
    flushState->gpu()->submit(commandBuffer);
    flushState->setCommandBuffer(nullptr);

    return true;
}

void GrRenderTargetOpList::endFlush() {
    fLastClipStackGenID = SK_InvalidUniqueID;
    this->deleteOps();
    fClipAllocator.reset();
    INHERITED::endFlush();
}

void GrRenderTargetOpList::discard() {
    // Discard calls to in-progress opLists are ignored. Calls at the start update the
    // opLists' color & stencil load ops.
    if (this->isEmpty()) {
        fColorLoadOp = GrLoadOp::kDiscard;
        fStencilLoadOp = GrLoadOp::kDiscard;
    }
}

void GrRenderTargetOpList::fullClear(GrContext* context, const SkPMColor4f& color) {

    // This is conservative. If the opList is marked as needing a stencil buffer then there
    // may be a prior op that writes to the stencil buffer. Although the clear will ignore the
    // stencil buffer, following draw ops may not, so we can't get rid of all the preceding ops.
    // Beware! If we ever add any ops that have a side effect beyond modifying the stencil
    // buffer we will need a more elaborate tracking system (skbug.com/7002).
    if (this->isEmpty() || !fTarget.get()->asRenderTargetProxy()->needsStencil()) {
        this->deleteOps();
        fDeferredProxies.reset();
        fColorLoadOp = GrLoadOp::kClear;
        fLoadClearColor = color;
        return;
    }

    std::unique_ptr<GrClearOp> op(GrClearOp::Make(context, GrFixedClip::Disabled(),
                                                  color, fTarget.get()));
    if (!op) {
        return;
    }

    this->recordOp(std::move(op), *context->contextPriv().caps());
}

////////////////////////////////////////////////////////////////////////////////

// This closely parallels GrTextureOpList::copySurface but renderTargetOpLists
// also store the applied clip and dest proxy with the op
bool GrRenderTargetOpList::copySurface(GrContext* context,
                                       GrSurfaceProxy* dst,
                                       GrSurfaceProxy* src,
                                       const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    SkASSERT(dst->asRenderTargetProxy() == fTarget.get());
    std::unique_ptr<GrOp> op = GrCopySurfaceOp::Make(context, dst, src, srcRect, dstPoint);
    if (!op) {
        return false;
    }

    this->addOp(std::move(op), *context->contextPriv().caps());
    return true;
}

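// Drops any recorded op that references a proxy which has not been instantiated (i.e., whose
// instantiation failed), since such an op cannot be executed.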
void GrRenderTargetOpList::purgeOpsWithUninstantiatedProxies() {
    bool hasUninstantiatedProxy = false;
    auto checkInstantiation = [&hasUninstantiatedProxy](GrSurfaceProxy* p) {
        if (!p->isInstantiated()) {
            hasUninstantiatedProxy = true;
        }
    };
    for (RecordedOp& recordedOp : fRecordedOps) {
        hasUninstantiatedProxy = false;
        if (recordedOp.fOp) {
            recordedOp.visitProxies(checkInstantiation, GrOp::VisitorType::kOther);
        }
        if (hasUninstantiatedProxy) {
            // When instantiation of the proxy fails we drop the Op
            recordedOp.deleteOp(fOpMemoryPool.get());
        }
    }
}

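// Reports the usage interval (first op index through last op index) of every proxy this opList
// touches to the resource allocator.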
void GrRenderTargetOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
    unsigned int cur = alloc->numOps();

    for (int i = 0; i < fDeferredProxies.count(); ++i) {
        SkASSERT(!fDeferredProxies[i]->isInstantiated());
        // We give all the deferred proxies a write usage at the very start of flushing. This
        // locks them out of being reused for the entire flush until they are read - and then
        // they can be recycled. This is a bit unfortunate because a flush can proceed in waves
        // with sub-flushes. The deferred proxies only need to be pinned from the start of
        // the sub-flush in which they appear.
        alloc->addInterval(fDeferredProxies[i], 0, 0);
    }

    // Add the interval for all the writes to this opList's target
    if (fRecordedOps.count()) {
        alloc->addInterval(fTarget.get(), cur, cur+fRecordedOps.count()-1);
    } else {
        // This can happen if there is a loadOp (e.g., a clear) but no other draws. In this case we
        // still need to add an interval for the destination so we create a fake op# for
        // the missing clear op.
        alloc->addInterval(fTarget.get());
        alloc->incOps();
    }

    auto gather = [ alloc SkDEBUGCODE(, this) ] (GrSurfaceProxy* p) {
        alloc->addInterval(p SkDEBUGCODE(, fTarget.get() == p));
    };
    for (const RecordedOp& recordedOp : fRecordedOps) {
        // only diff from the GrTextureOpList version
        recordedOp.visitProxies(gather, GrOp::VisitorType::kAllocatorGather);

        // Even though the op may have been moved we still need to increment the op count to
        // keep all the math consistent.
        alloc->incOps();
    }
}

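// Two ops may be reordered relative to one another only if their bounds do not overlap;
// otherwise swapping them could violate painter's order.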
static inline bool can_reorder(const SkRect& a, const SkRect& b) { return !GrRectsOverlap(a, b); }

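// Ops can only be merged when their applied clips are identical and their dst-texture copies
// (if any) match; the final decision is delegated to the op's own combineIfPossible().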
GrOp::CombineResult GrRenderTargetOpList::combineIfPossible(const RecordedOp& a, GrOp* b,
                                                            const GrAppliedClip* bClip,
                                                            const DstProxy* bDstProxy,
                                                            const GrCaps& caps) {
    if (a.fAppliedClip) {
        if (!bClip) {
            return GrOp::CombineResult::kCannotCombine;
        }
        if (*a.fAppliedClip != *bClip) {
            return GrOp::CombineResult::kCannotCombine;
        }
    } else if (bClip) {
        return GrOp::CombineResult::kCannotCombine;
    }
    if (bDstProxy) {
        if (a.fDstProxy != *bDstProxy) {
            return GrOp::CombineResult::kCannotCombine;
        }
    } else if (a.fDstProxy.proxy()) {
        return GrOp::CombineResult::kCannotCombine;
    }
    return a.fOp->combineIfPossible(b, caps);
}

void GrRenderTargetOpList::recordOp(std::unique_ptr<GrOp> op,
                                    const GrCaps& caps,
                                    GrAppliedClip* clip,
                                    const DstProxy* dstProxy) {
    SkDEBUGCODE(op->validate();)
    SkASSERT(fTarget.get());

    // A closed GrOpList should never receive new/more ops
    SkASSERT(!this->isClosed());

    // Check if there is an op we can combine with by linearly searching back until we either
    // 1) check every op
    // 2) intersect with something
    // 3) find a 'blocker'
    GR_AUDIT_TRAIL_ADD_OP(fAuditTrail, op.get(), fTarget.get()->uniqueID());
    GrOP_INFO("opList: %d Recording (%s, opID: %u)\n"
              "\tBounds [L: %.2f, T: %.2f R: %.2f B: %.2f]\n",
              this->uniqueID(),
              op->name(),
              op->uniqueID(),
              op->bounds().fLeft, op->bounds().fTop,
              op->bounds().fRight, op->bounds().fBottom);
    GrOP_INFO(SkTabString(op->dumpInfo(), 1).c_str());
    GrOP_INFO("\tOutcome:\n");
    int maxCandidates = SkTMin(kMaxOpLookback, fRecordedOps.count());
    if (maxCandidates) {
        int i = 0;
        while (true) {
            const RecordedOp& candidate = fRecordedOps.fromBack(i);
            auto combineResult = this->combineIfPossible(candidate, op.get(), clip, dstProxy, caps);
            switch (combineResult) {
                case GrOp::CombineResult::kMayChain:
                    // See skbug.com/8491 for an explanation of why op chaining is disabled.
                    break;
                case GrOp::CombineResult::kMerged:
                    GrOP_INFO("\t\tBackward: Combining with (%s, opID: %u)\n",
                              candidate.fOp->name(), candidate.fOp->uniqueID());
                    GrOP_INFO("\t\t\tBackward: Combined op info:\n");
                    GrOP_INFO(SkTabString(candidate.fOp->dumpInfo(), 4).c_str());
                    GR_AUDIT_TRAIL_OPS_RESULT_COMBINED(fAuditTrail, candidate.fOp.get(), op.get());
                    fOpMemoryPool->release(std::move(op));
                    return;
                case GrOp::CombineResult::kCannotCombine:
                    break;
            }
            // Stop going backwards if we would cause a painter's order violation.
            if (!can_reorder(candidate.fOp->bounds(), op->bounds())) {
                GrOP_INFO("\t\tBackward: Intersects with (%s, opID: %u)\n", candidate.fOp->name(),
                          candidate.fOp->uniqueID());
                break;
            }
            ++i;
            if (i == maxCandidates) {
                GrOP_INFO("\t\tBackward: Reached max lookback or beginning of op array %d\n", i);
                break;
            }
        }
    } else {
        GrOP_INFO("\t\tBackward: FirstOp\n");
    }
    GR_AUDIT_TRAIL_OP_RESULT_NEW(fAuditTrail, op);
    if (clip) {
        clip = fClipAllocator.make<GrAppliedClip>(std::move(*clip));
        SkDEBUGCODE(fNumClips++;)
    }
    fRecordedOps.emplace_back(std::move(op), clip, dstProxy);
}

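// Makes a forward pass over the recorded ops, merging compatible ops within the lookahead
// window; a merged op is moved forward into the later op's slot so draw order is preserved.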
void GrRenderTargetOpList::forwardCombine(const GrCaps& caps) {
    SkASSERT(!this->isClosed());
    GrOP_INFO("opList: %d ForwardCombine %d ops:\n", this->uniqueID(), fRecordedOps.count());

    for (int i = 0; i < fRecordedOps.count() - 1; ++i) {
        GrOp* op = fRecordedOps[i].fOp.get();
        int maxCandidateIdx = SkTMin(i + kMaxOpLookahead, fRecordedOps.count() - 1);
        int j = i + 1;
        while (true) {
            const RecordedOp& candidate = fRecordedOps[j];
            auto combineResult =
                    this->combineIfPossible(fRecordedOps[i], candidate.fOp.get(),
                                            candidate.fAppliedClip, &candidate.fDstProxy, caps);
            switch (combineResult) {
                case GrOp::CombineResult::kMayChain:
                    // See skbug.com/8491 for an explanation of why op chaining is disabled.
                    break;
                case GrOp::CombineResult::kMerged:
                    GrOP_INFO("\t\t%d: (%s opID: %u) -> Combining with (%s, opID: %u)\n", i,
                              op->name(), op->uniqueID(), candidate.fOp->name(),
                              candidate.fOp->uniqueID());
                    GR_AUDIT_TRAIL_OPS_RESULT_COMBINED(fAuditTrail, op, candidate.fOp.get());
                    fOpMemoryPool->release(std::move(fRecordedOps[j].fOp));
                    fRecordedOps[j].fOp = std::move(fRecordedOps[i].fOp);
                    break;
                case GrOp::CombineResult::kCannotCombine:
                    break;
            }
            if (!fRecordedOps[i].fOp) {
                break;
            }
            // Stop traversing if we would cause a painter's order violation.
            if (!can_reorder(candidate.fOp->bounds(), op->bounds())) {
                GrOP_INFO("\t\t%d: (%s opID: %u) -> Intersects with (%s, opID: %u)\n",
                          i, op->name(), op->uniqueID(),
                          candidate.fOp->name(), candidate.fOp->uniqueID());
                break;
            }
            if (++j > maxCandidateIdx) {
                GrOP_INFO("\t\t%d: (%s opID: %u) -> Reached max lookahead or end of array\n", i,
                          op->name(), op->uniqueID());
                break;
            }
        }
    }
}
458