/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrRenderTargetOpList.h"
#include "GrAuditTrail.h"
#include "GrCaps.h"
#include "GrGpu.h"
#include "GrGpuCommandBuffer.h"
#include "GrMemoryPool.h"
#include "GrRect.h"
#include "GrRenderTargetContext.h"
#include "GrResourceAllocator.h"
#include "ops/GrClearOp.h"
#include "ops/GrCopySurfaceOp.h"
#include "SkTraceEvent.h"


////////////////////////////////////////////////////////////////////////////////

// Experimentally we have found that most combining occurs within the first 10 comparisons.
static const int kMaxOpLookback = 10;
static const int kMaxOpLookahead = 10;

GrRenderTargetOpList::GrRenderTargetOpList(GrResourceProvider* resourceProvider,
                                           sk_sp<GrOpMemoryPool> opMemoryPool,
                                           GrRenderTargetProxy* proxy,
                                           GrAuditTrail* auditTrail)
        : INHERITED(resourceProvider, std::move(opMemoryPool), proxy, auditTrail)
        , fLastClipStackGenID(SK_InvalidUniqueID)
        SkDEBUGCODE(, fNumClips(0)) {
}

void GrRenderTargetOpList::RecordedOp::deleteOp(GrOpMemoryPool* opMemoryPool) {
    opMemoryPool->release(std::move(fOp));
}

void GrRenderTargetOpList::deleteOps() {
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (fRecordedOps[i].fOp) {
            fRecordedOps[i].deleteOp(fOpMemoryPool.get());
        }
    }
    fRecordedOps.reset();
}

GrRenderTargetOpList::~GrRenderTargetOpList() {
    this->deleteOps();
}

////////////////////////////////////////////////////////////////////////////////

#ifdef SK_DEBUG
void GrRenderTargetOpList::dump(bool printDependencies) const {
    INHERITED::dump(printDependencies);

    SkDebugf("ops (%d):\n", fRecordedOps.count());
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        SkDebugf("*******************************\n");
        if (!fRecordedOps[i].fOp) {
            SkDebugf("%d: <combined forward or failed instantiation>\n", i);
        } else {
            SkDebugf("%d: %s\n", i, fRecordedOps[i].fOp->name());
            SkString str = fRecordedOps[i].fOp->dumpInfo();
            SkDebugf("%s\n", str.c_str());
            const SkRect& bounds = fRecordedOps[i].fOp->bounds();
            SkDebugf("ClippedBounds: [L: %.2f, T: %.2f, R: %.2f, B: %.2f]\n", bounds.fLeft,
                     bounds.fTop, bounds.fRight, bounds.fBottom);
        }
    }
}

void GrRenderTargetOpList::visitProxies_debugOnly(const GrOp::VisitProxyFunc& func) const {
    for (const RecordedOp& recordedOp : fRecordedOps) {
        recordedOp.visitProxies(func);
    }
}
#endif

void GrRenderTargetOpList::onPrepare(GrOpFlushState* flushState) {
    SkASSERT(fTarget.get()->peekRenderTarget());
    SkASSERT(this->isClosed());
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    TRACE_EVENT0("skia", TRACE_FUNC);
#endif

    // Loop over the ops that haven't yet been prepared.
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (fRecordedOps[i].fOp && fRecordedOps[i].fOp->isChainHead()) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
            TRACE_EVENT0("skia", fRecordedOps[i].fOp->name());
#endif
            GrOpFlushState::OpArgs opArgs = {
                fRecordedOps[i].fOp.get(),
                fTarget.get()->asRenderTargetProxy(),
                fRecordedOps[i].fAppliedClip,
                fRecordedOps[i].fDstProxy
            };

            flushState->setOpArgs(&opArgs);
            fRecordedOps[i].fOp->prepare(flushState);
            flushState->setOpArgs(nullptr);
        }
    }
}

static GrGpuRTCommandBuffer* create_command_buffer(GrGpu* gpu,
                                                    GrRenderTarget* rt,
                                                    GrSurfaceOrigin origin,
                                                    GrLoadOp colorLoadOp,
                                                    GrColor loadClearColor,
                                                    GrLoadOp stencilLoadOp) {
    const GrGpuRTCommandBuffer::LoadAndStoreInfo kColorLoadStoreInfo {
        colorLoadOp,
        GrStoreOp::kStore,
        loadClearColor
    };

    // TODO:
    // We would like to (at this level) only ever clear & discard. We would need
    // to stop splitting up higher level opLists for copyOps to achieve that.
    // Note: we would still need SB loads and stores but they would happen at a
    // lower level (inside the VK command buffer).
    const GrGpuRTCommandBuffer::StencilLoadAndStoreInfo stencilLoadAndStoreInfo {
        stencilLoadOp,
        GrStoreOp::kStore,
    };

    return gpu->getCommandBuffer(rt, origin, kColorLoadStoreInfo, stencilLoadAndStoreInfo);
}

// TODO: this is where GrOp::renderTarget is used (which is fine since it
// is at flush time). However, we need to store the RenderTargetProxy in the
// Ops and instantiate them here.
bool GrRenderTargetOpList::onExecute(GrOpFlushState* flushState) {
    // TODO: Forcing the execution of the discard here isn't ideal since it will cause us to do a
    // discard and then store the data back in memory so that the load op on future draws doesn't
    // think the memory is uninitialized. Ideally we would want a system where we are tracking
    // whether the proxy itself has valid data or not, and then use that as a signal on whether we
    // should be loading or discarding. In that world we wouldn't need to worry about executing
    // opLists with no ops just to do a discard.
    if (0 == fRecordedOps.count() && GrLoadOp::kClear != fColorLoadOp &&
        GrLoadOp::kDiscard != fColorLoadOp) {
        return false;
    }

    SkASSERT(fTarget.get()->peekRenderTarget());
    TRACE_EVENT0("skia", TRACE_FUNC);

    // TODO: at the very least, we want the stencil store op to always be discard (at this
    // level). In Vulkan, sub-command buffers would still need to load & store the stencil buffer.
    GrGpuRTCommandBuffer* commandBuffer = create_command_buffer(
                                                    flushState->gpu(),
                                                    fTarget.get()->peekRenderTarget(),
                                                    fTarget.get()->origin(),
                                                    fColorLoadOp,
                                                    fLoadClearColor,
                                                    fStencilLoadOp);
    flushState->setCommandBuffer(commandBuffer);
    commandBuffer->begin();

    // Draw all the generated geometry.
    for (int i = 0; i < fRecordedOps.count(); ++i) {
        if (!fRecordedOps[i].fOp || !fRecordedOps[i].fOp->isChainHead()) {
            continue;
        }
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        TRACE_EVENT0("skia", fRecordedOps[i].fOp->name());
#endif

        GrOpFlushState::OpArgs opArgs {
            fRecordedOps[i].fOp.get(),
            fTarget.get()->asRenderTargetProxy(),
            fRecordedOps[i].fAppliedClip,
            fRecordedOps[i].fDstProxy
        };

        flushState->setOpArgs(&opArgs);
        fRecordedOps[i].fOp->execute(flushState);
        flushState->setOpArgs(nullptr);
    }

    commandBuffer->end();
    flushState->gpu()->submit(commandBuffer);
    flushState->setCommandBuffer(nullptr);

    return true;
}

void GrRenderTargetOpList::endFlush() {
    fLastClipStackGenID = SK_InvalidUniqueID;
    this->deleteOps();
    fClipAllocator.reset();
    INHERITED::endFlush();
}

void GrRenderTargetOpList::discard() {
    // Discard calls to in-progress opLists are ignored. Calls at the start update the
    // opLists' color & stencil load ops.
    if (this->isEmpty()) {
        fColorLoadOp = GrLoadOp::kDiscard;
        fStencilLoadOp = GrLoadOp::kDiscard;
    }
}

void GrRenderTargetOpList::fullClear(GrContext* context, GrColor color) {

    // This is conservative. If the opList is marked as needing a stencil buffer then there
    // may be a prior op that writes to the stencil buffer. Although the clear will ignore the
    // stencil buffer, following draw ops may not so we can't get rid of all the preceding ops.
    // Beware! If we ever add any ops that have a side effect beyond modifying the stencil
    // buffer we will need a more elaborate tracking system (skbug.com/7002).
    if (this->isEmpty() || !fTarget.get()->asRenderTargetProxy()->needsStencil()) {
        this->deleteOps();
        fDeferredProxies.reset();
        fColorLoadOp = GrLoadOp::kClear;
        fLoadClearColor = color;
        return;
    }

    std::unique_ptr<GrClearOp> op(GrClearOp::Make(context, GrFixedClip::Disabled(),
                                                  color, fTarget.get()));
    if (!op) {
        return;
    }

    this->recordOp(std::move(op), *context->contextPriv().caps());
}

////////////////////////////////////////////////////////////////////////////////

// This closely parallels GrTextureOpList::copySurface but renderTargetOpLists
// also store the applied clip and dest proxy with the op.
bool GrRenderTargetOpList::copySurface(GrContext* context,
                                       GrSurfaceProxy* dst,
                                       GrSurfaceProxy* src,
                                       const SkIRect& srcRect,
                                       const SkIPoint& dstPoint) {
    SkASSERT(dst->asRenderTargetProxy() == fTarget.get());
    std::unique_ptr<GrOp> op = GrCopySurfaceOp::Make(context, dst, src, srcRect, dstPoint);
    if (!op) {
        return false;
    }

    this->addOp(std::move(op), *context->contextPriv().caps());
    return true;
}

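// Drops any op that references a proxy which failed to instantiate. The op's slot in
// fRecordedOps is kept (with a null fOp) so that op indices remain stable.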
void GrRenderTargetOpList::purgeOpsWithUninstantiatedProxies() {
    bool hasUninstantiatedProxy = false;
    auto checkInstantiation = [&hasUninstantiatedProxy](GrSurfaceProxy* p) {
        if (!p->isInstantiated()) {
            hasUninstantiatedProxy = true;
        }
    };
    for (RecordedOp& recordedOp : fRecordedOps) {
        hasUninstantiatedProxy = false;
        if (recordedOp.fOp) {
            recordedOp.visitProxies(checkInstantiation);
        }
        if (hasUninstantiatedProxy) {
            // When instantiation of the proxy fails we drop the Op
            recordedOp.deleteOp(fOpMemoryPool.get());
        }
    }
}

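// Tells the resource allocator which ops use which proxies (as intervals of op indices) so that
// backing surfaces can be assigned to, and recycled between, opLists during the flush.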
void GrRenderTargetOpList::gatherProxyIntervals(GrResourceAllocator* alloc) const {
    unsigned int cur = alloc->numOps();

    for (int i = 0; i < fDeferredProxies.count(); ++i) {
        SkASSERT(!fDeferredProxies[i]->isInstantiated());
        // We give all the deferred proxies a write usage at the very start of flushing. This
        // locks them out of being reused for the entire flush until they are read - and then
        // they can be recycled. This is a bit unfortunate because a flush can proceed in waves
        // with sub-flushes. The deferred proxies only need to be pinned from the start of
        // the sub-flush in which they appear.
        alloc->addInterval(fDeferredProxies[i], 0, 0);
    }

    // Add the interval for all the writes to this opList's target
    if (fRecordedOps.count()) {
        alloc->addInterval(fTarget.get(), cur, cur+fRecordedOps.count()-1);
    } else {
        // This can happen if there is a loadOp (e.g., a clear) but no other draws. In this case we
        // still need to add an interval for the destination so we create a fake op# for
        // the missing clear op.
        alloc->addInterval(fTarget.get());
        alloc->incOps();
    }

    auto gather = [ alloc SkDEBUGCODE(, this) ] (GrSurfaceProxy* p) {
        alloc->addInterval(p SkDEBUGCODE(, fTarget.get() == p));
    };
    for (const RecordedOp& recordedOp : fRecordedOps) {
        recordedOp.visitProxies(gather); // only diff from the GrTextureOpList version

        // Even though the op may have been moved we still need to increment the op count to
        // keep all the math consistent.
        alloc->incOps();
    }
}

static inline bool can_reorder(const SkRect& a, const SkRect& b) { return !GrRectsOverlap(a, b); }

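// An op 'b' can only merge into or chain with a previously recorded op 'a' if both use the same
// applied clip and the same dst-proxy; when they do, the final merge/chain decision is delegated
// to the ops themselves via GrOp::combineIfPossible.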
GrOp::CombineResult GrRenderTargetOpList::combineIfPossible(const RecordedOp& a, GrOp* b,
                                                             const GrAppliedClip* bClip,
                                                             const DstProxy* bDstProxy,
                                                             const GrCaps& caps) {
    if (a.fAppliedClip) {
        if (!bClip) {
            return GrOp::CombineResult::kCannotCombine;
        }
        if (*a.fAppliedClip != *bClip) {
            return GrOp::CombineResult::kCannotCombine;
        }
    } else if (bClip) {
        return GrOp::CombineResult::kCannotCombine;
    }
    if (bDstProxy) {
        if (a.fDstProxy != *bDstProxy) {
            return GrOp::CombineResult::kCannotCombine;
        }
    } else if (a.fDstProxy.proxy()) {
        return GrOp::CombineResult::kCannotCombine;
    }
    return a.fOp->combineIfPossible(b, caps);
}

uint32_t GrRenderTargetOpList::recordOp(std::unique_ptr<GrOp> op,
                                        const GrCaps& caps,
                                        GrAppliedClip* clip,
                                        const DstProxy* dstProxy) {
    SkASSERT(fTarget.get());

    // A closed GrOpList should never receive new/more ops
    SkASSERT(!this->isClosed());

    // Check if there is an op we can combine with by linearly searching back until we either
    // 1) check every op
    // 2) intersect with something
    // 3) find a 'blocker'
    GR_AUDIT_TRAIL_ADD_OP(fAuditTrail, op.get(), fTarget.get()->uniqueID());
    GrOP_INFO("opList: %d Recording (%s, opID: %u)\n"
              "\tBounds [L: %.2f, T: %.2f R: %.2f B: %.2f]\n",
              this->uniqueID(),
              op->name(),
              op->uniqueID(),
              op->bounds().fLeft, op->bounds().fTop,
              op->bounds().fRight, op->bounds().fBottom);
    GrOP_INFO(SkTabString(op->dumpInfo(), 1).c_str());
    GrOP_INFO("\tOutcome:\n");
    int maxCandidates = SkTMin(kMaxOpLookback, fRecordedOps.count());
    int firstChainableIdx = -1;
    if (maxCandidates) {
        int i = 0;
        while (true) {
            const RecordedOp& candidate = fRecordedOps.fromBack(i);
            auto combineResult = this->combineIfPossible(candidate, op.get(), clip, dstProxy,
                                                         caps);
            switch (combineResult) {
                case GrOp::CombineResult::kMayChain:
                    if (candidate.fOp->isChainTail() && firstChainableIdx < 0) {
                        GrOP_INFO("\t\tBackward: Can chain with (%s, opID: %u)\n",
                                  candidate.fOp->name(), candidate.fOp->uniqueID());
                        firstChainableIdx = i;
                    }
                    break;
                case GrOp::CombineResult::kMerged:
                    GrOP_INFO("\t\tBackward: Combining with (%s, opID: %u)\n",
                              candidate.fOp->name(), candidate.fOp->uniqueID());
                    GrOP_INFO("\t\t\tBackward: Combined op info:\n");
                    GrOP_INFO(SkTabString(candidate.fOp->dumpInfo(), 4).c_str());
                    GR_AUDIT_TRAIL_OPS_RESULT_COMBINED(fAuditTrail, candidate.fOp.get(), op.get());
                    fOpMemoryPool->release(std::move(op));
                    return SK_InvalidUniqueID;
                case GrOp::CombineResult::kCannotCombine:
                    break;
            }
            // Stop going backwards if we would cause a painter's order violation. We only need to
            // test against chain heads as elements of a chain always draw in their chain head's
            // slot.
            if (candidate.fOp->isChainHead() &&
                !can_reorder(candidate.fOp->bounds(), op->bounds())) {
                GrOP_INFO("\t\tBackward: Intersects with (%s, opID: %u)\n", candidate.fOp->name(),
                          candidate.fOp->uniqueID());
                break;
            }
            ++i;
            if (i == maxCandidates) {
                GrOP_INFO("\t\tBackward: Reached max lookback or beginning of op array %d\n", i);
                break;
            }
        }
    } else {
        GrOP_INFO("\t\tBackward: FirstOp\n");
    }
    GR_AUDIT_TRAIL_OP_RESULT_NEW(fAuditTrail, op);
    if (clip) {
        clip = fClipAllocator.make<GrAppliedClip>(std::move(*clip));
        SkDEBUGCODE(fNumClips++;)
    }
    if (firstChainableIdx >= 0) {
        // If we chain this op it will draw in the slot of the head of the chain. We have to check
        // that the new op's bounds don't intersect any of the other ops between firstChainableIdx
        // and the head of that op's chain. We only need to test against chain heads as elements of
        // a chain always draw in their chain head's slot.
        const GrOp* chainHead = fRecordedOps.fromBack(firstChainableIdx).fOp->chainHead();
        int idx = firstChainableIdx;
        bool chain = true;
        while (fRecordedOps.fromBack(idx).fOp.get() != chainHead) {
            // If idx is not in the same chain then we have to check against its bounds as we will
            // draw before it (when chainHead draws).
            const GrOp* testOp = fRecordedOps.fromBack(idx).fOp.get();
            if (testOp->isChainHead() && !can_reorder(testOp->bounds(), op->bounds())) {
                GrOP_INFO("\t\tBackward: Intersects with (%s, opID: %u). Cannot chain.\n",
                          testOp->name(), testOp->uniqueID());
                chain = false;
                break;
            }
            ++idx;
            // We must encounter the chain head before running off the beginning of the list.
            SkASSERT(idx < fRecordedOps.count());
        }
        if (chain) {
            GrOp* prevOp = fRecordedOps.fromBack(firstChainableIdx).fOp.get();
            GrOP_INFO("\t\t\tBackward: Chained to (%s, opID: %u)\n", prevOp->name(),
                      prevOp->uniqueID());
            prevOp->setNextInChain(op.get());
        }
    }
    fRecordedOps.emplace_back(std::move(op), clip, dstProxy);
    return this->uniqueID();
}

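// Forward pass over the already-recorded ops: for each op, look ahead (up to kMaxOpLookahead
// slots) for an op it can merge into or chain with. A merged or chained op is moved into the
// later slot so that painter's order is preserved.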
void GrRenderTargetOpList::forwardCombine(const GrCaps& caps) {
    SkASSERT(!this->isClosed());

    GrOP_INFO("opList: %d ForwardCombine %d ops:\n", this->uniqueID(), fRecordedOps.count());

    for (int i = 0; i < fRecordedOps.count() - 1; ++i) {
        GrOp* op = fRecordedOps[i].fOp.get();

        int maxCandidateIdx = SkTMin(i + kMaxOpLookahead, fRecordedOps.count() - 1);
        int j = i + 1;
        int firstChainableIdx = -1;
        while (true) {
            const RecordedOp& candidate = fRecordedOps[j];
            auto combineResult =
                    this->combineIfPossible(fRecordedOps[i], candidate.fOp.get(),
                                            candidate.fAppliedClip, &candidate.fDstProxy, caps);
            switch (combineResult) {
                case GrOp::CombineResult::kMayChain:
                    if (firstChainableIdx < 0 && !fRecordedOps[i].fOp->isChained() &&
                        !fRecordedOps[j].fOp->isChained()) {
                        GrOP_INFO("\t\tForward: Can chain with (%s, opID: %u)\n",
                                  candidate.fOp->name(), candidate.fOp->uniqueID());
                        firstChainableIdx = j;
                    }
                    break;
                case GrOp::CombineResult::kMerged:
                    GrOP_INFO("\t\t%d: (%s opID: %u) -> Combining with (%s, opID: %u)\n", i,
                              op->name(), op->uniqueID(), candidate.fOp->name(),
                              candidate.fOp->uniqueID());
                    GR_AUDIT_TRAIL_OPS_RESULT_COMBINED(fAuditTrail, op, candidate.fOp.get());
                    fOpMemoryPool->release(std::move(fRecordedOps[j].fOp));
                    fRecordedOps[j].fOp = std::move(fRecordedOps[i].fOp);
                    break;
                case GrOp::CombineResult::kCannotCombine:
                    break;
            }
            if (!fRecordedOps[i].fOp) {
                break;
            }
            // Stop traversing if we would cause a painter's order violation.
            if (candidate.fOp->isChainHead() &&
                !can_reorder(candidate.fOp->bounds(), op->bounds())) {
                GrOP_INFO("\t\t%d: (%s opID: %u) -> Intersects with (%s, opID: %u)\n",
                          i, op->name(), op->uniqueID(),
                          candidate.fOp->name(), candidate.fOp->uniqueID());
                break;
            }
            ++j;
            if (j > maxCandidateIdx) {
                if (firstChainableIdx >= 0) {
                    GrOp* nextOp = fRecordedOps[firstChainableIdx].fOp.get();
                    GrOP_INFO("\t\t\tForward: Chained to (%s, opID: %u)\n", nextOp->name(),
                              nextOp->uniqueID());
                    // We have to chain i before firstChainableIdx in order to preserve their
                    // relative order as they may overlap.
                    fRecordedOps[i].fOp->setNextInChain(nextOp);
                    // However we want to draw them *after* any ops that occur between them. So move
                    // the head of the new chain to the later slot as we only execute chain heads.
                    std::swap(fRecordedOps[i].fOp, fRecordedOps[firstChainableIdx].fOp);
                } else {
                    GrOP_INFO("\t\t%d: (%s opID: %u) -> Reached max lookahead or end of array\n", i,
                              op->name(), op->uniqueID());
                }
                break;
            }
        }
    }
}