blob: 5cb4cb2489463194a623a2a1c6c68f36a70ee98c [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040011#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040014#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrSurfaceProxyPriv.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040016
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Hands out a process-wide unique ID for debugging/tracking of intervals.
    // SK_InvalidUniqueID is never returned; the counter simply skips past it.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        for (;;) {
            uint32_t id = nextID.fetch_add(1, std::memory_order_relaxed);
            if (id != SK_InvalidUniqueID) {
                return id;
            }
        }
    }
#endif
29
GrResourceAllocator::~GrResourceAllocator() {
    // By destruction time assign() should have drained both interval lists
    // (every pending interval walked, every active interval expired) and the
    // proxy-ID -> interval hash should have been reset.
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
35
// Records that 'proxy' is used over the op indices [start, end]. If the proxy
// already has an interval it is revised (use count bumped for ActualUse::kYes,
// end extended); otherwise a new interval is created and registered.
// NOTE(review): intervals appear to be added in increasing-start order across
// calls (see the SK_DEBUG checks below) — confirm against callers.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    // Some proxies opt out of allocator tracking entirely.
    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Lazy instantiation failed; remember so assign() can report failure.
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        // Revise the interval for an existing use
        Interval* intvl = *intvlPtr;
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Otherwise the new use must not begin before the existing interval ends.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        // Only genuine uses count toward the ref-matching check in
        // isSurfaceRecyclable(); bookkeeping-only registrations do not.
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First time we've seen this proxy: allocate a new interval for it.
    Interval* newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
91
Adlai Holler1143b1b2021-03-16 13:07:40 -040092bool GrResourceAllocator::Interval::isSurfaceRecyclable() const {
93 // All the refs on the proxy are known to the resource allocator thus no one
94 // should be holding onto it outside of Ganesh.
95 return !fProxy->refCntGreaterThan(fUses);
96}
97
Robert Phillips5af44de2017-07-18 14:49:38 -040098GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -040099 SkDEBUGCODE(this->validate());
100
Robert Phillips5af44de2017-07-18 14:49:38 -0400101 Interval* temp = fHead;
102 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500103 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400104 if (!fHead) {
105 fTail = nullptr;
106 }
107 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400108 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400109
110 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400111 return temp;
112}
113
114// TODO: fuse this with insertByIncreasingEnd
115void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400116 SkDEBUGCODE(this->validate());
117 SkASSERT(!intvl->next());
118
Robert Phillips5af44de2017-07-18 14:49:38 -0400119 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400120 // 14%
121 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500122 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400123 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500124 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400125 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400126 } else if (fTail->start() <= intvl->start()) {
127 // 83%
128 fTail->setNext(intvl);
129 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400130 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400131 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400132 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500133 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400134 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400135 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400136
137 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500138 intvl->setNext(next);
139 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400140 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400141
142 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400143}
144
145// TODO: fuse this with insertByIncreasingStart
146void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400147 SkDEBUGCODE(this->validate());
148 SkASSERT(!intvl->next());
149
Robert Phillips5af44de2017-07-18 14:49:38 -0400150 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400151 // 14%
152 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500153 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400154 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500155 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400156 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400157 } else if (fTail->end() <= intvl->end()) {
158 // 3%
159 fTail->setNext(intvl);
160 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400161 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400162 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400163 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500164 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400165 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400166 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400167
168 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500169 intvl->setNext(next);
170 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400171 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400172
173 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400174}
175
#ifdef SK_DEBUG
// Debug-only consistency check: fHead/fTail are both null or both set, and
// fTail is the final node reachable from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    Interval* node = fHead;
    while (node) {
        last = node;
        node = node->next();
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500187
Robert Phillips5af44de2017-07-18 14:49:38 -0400188// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400189void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400190 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
191
192 if (!key.isValid()) {
193 return; // can't do it w/o a valid scratch key
194 }
195
Robert Phillipsf8e25022017-11-08 15:24:31 -0500196 if (surface->getUniqueKey().isValid()) {
197 // If the surface has a unique key we throw it back into the resource cache.
198 // If things get really tight 'findSurfaceFor' may pull it back out but there is
199 // no need to have it in tight rotation.
200 return;
201 }
202
Robert Phillips715d08c2018-07-18 13:56:48 -0400203#if GR_ALLOCATION_SPEW
204 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
205#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400206 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500207 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400208}
209
210// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
211// If we can't find a useable one, create a new one.
Chris Dalton0b68dda2019-11-07 21:08:03 -0700212sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy) {
Adlai Hollercc119d92021-03-16 15:17:25 -0400213 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
214 // First try to reattach to a cached surface if the proxy is uniquely keyed
215 if (sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(uniqueKey)) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400216 return surface;
217 }
218 }
219
Adlai Hollercc119d92021-03-16 15:17:25 -0400220 // Then look in the free pool
Robert Phillips57aa3672017-07-21 11:38:13 -0400221 GrScratchKey key;
Robert Phillips5af44de2017-07-18 14:49:38 -0400222
Greg Danield51fa2f2020-01-22 16:53:38 -0500223 proxy->priv().computeScratchKey(*fResourceProvider->caps(), &key);
Robert Phillips57aa3672017-07-21 11:38:13 -0400224
Robert Phillips10d17212019-04-24 14:09:10 -0400225 auto filter = [] (const GrSurface* s) {
226 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500227 };
228 sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
Robert Phillips57aa3672017-07-21 11:38:13 -0400229 if (surface) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500230 if (SkBudgeted::kYes == proxy->isBudgeted() &&
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500231 GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500232 // This gets the job done but isn't quite correct. It would be better to try to
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500233 // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
Robert Phillipsf8e25022017-11-08 15:24:31 -0500234 surface->resourcePriv().makeBudgeted();
235 }
Robert Phillips0790f8a2018-09-18 13:11:03 -0400236 SkASSERT(!surface->getUniqueKey().isValid());
Robert Phillipsf8e25022017-11-08 15:24:31 -0500237 return surface;
Robert Phillips57aa3672017-07-21 11:38:13 -0400238 }
239
240 // Failing that, try to grab a new one from the resource cache
Robert Phillips5af44de2017-07-18 14:49:38 -0400241 return proxy->priv().createSurface(fResourceProvider);
242}
243
244// Remove any intervals that end before the current index. Return their GrSurfaces
Robert Phillips39667382019-04-17 16:03:30 -0400245// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400246void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500247 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400248 Interval* intvl = fActiveIntvls.popHead();
249 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500250
Adlai Holler729ba5e2021-03-15 12:34:31 -0400251 if (GrSurface* surf = intvl->proxy()->peekSurface()) {
Adlai Holler1143b1b2021-03-16 13:07:40 -0400252 if (intvl->isSurfaceRecyclable()) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400253 this->recycleSurface(sk_ref_sp(surf));
Robert Phillips715d08c2018-07-18 13:56:48 -0400254 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500255 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400256 }
257}
258
// Walks the pending intervals in increasing-start order, instantiating each
// proxy (lazily, from the free pool, or freshly created) and recycling
// surfaces whose intervals have expired. Returns true iff every
// instantiation succeeded (including any recorded earlier in addInterval()).
bool GrResourceAllocator::assign() {
    fIntvlHash.reset(); // we don't need the interval hash anymore

    SkDEBUGCODE(fAssigned = true;)

    if (fIntvlList.empty()) {
        return !fFailedInstantiation; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    while (Interval* cur = fIntvlList.popHead()) {
        // Free up any surfaces whose intervals ended before this one begins.
        this->expire(cur->start());

        if (cur->proxy()->isInstantiated()) {
            // Already has a surface (e.g. wrapped or previously assigned);
            // just track its lifetime.
            fActiveIntvls.insertByIncreasingEnd(cur);

            continue;
        }

        if (cur->proxy()->isLazy()) {
            // Lazy proxies supply their own surface via a callback.
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                fFailedInstantiation = true;
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy())) {
            // Propagate the proxy's unique key onto the surface so the cache
            // can find it again later.
            if (const auto& uniqueKey = cur->proxy()->getUniqueKey(); uniqueKey.isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(uniqueKey, surface.get());
                }
                SkASSERT(surface->getUniqueKey() == uniqueKey);
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                 surface->uniqueID().asUInt(),
                 cur->proxy()->uniqueID().asUInt());
#endif

            SkASSERT(!cur->proxy()->peekSurface());
            cur->proxy()->priv().assign(std::move(surface));
        } else {
            // No pooled surface and creation failed.
            SkASSERT(!cur->proxy()->isInstantiated());
            fFailedInstantiation = true;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return !fFailedInstantiation;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400314
#if GR_ALLOCATION_SPEW
// Debug-only: dump every pending interval (proxy ID, backing surface ID, span,
// ref counts) and then render an ASCII timeline of the intervals.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        // -1 in the second column means the proxy has no backing surface yet.
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        // One 'x' per op index covered by this interval, space otherwise.
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif