blob: 848665bd94dd5481184aac648a9086567e95ca25 [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040011#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040014#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrSurfaceProxyPriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040016#include "src/gpu/GrTextureProxy.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040017
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Hands out a process-wide unique ID for interval-creation tracking. The
    // counter is a relaxed atomic since only uniqueness matters, not ordering;
    // SK_InvalidUniqueID is skipped so the result is always a valid ID.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        for (;;) {
            uint32_t id = nextID.fetch_add(1, std::memory_order_relaxed);
            if (id != SK_InvalidUniqueID) {
                return id;
            }
        }
    }
#endif
30
// By destruction time assign() must have run to completion: both interval
// lists must have been drained and the proxy->interval hash cleared.
GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
36
// Records that 'proxy' needs a backing surface over the op range [start, end].
// If the proxy already has an interval, the existing interval is extended
// instead of adding a second one. Read-only and skippable proxies never get
// intervals; lazy read-only proxies are instantiated immediately here.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment

    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Lazy instantiation failed; flag it so assign() reports failure.
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        // Revise the interval for an existing use
        Interval* intvl = *intvlPtr;
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Otherwise new uses must come at or after the current interval's end.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First use of this proxy: allocate a fresh interval from the arena.
    Interval* newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
92
// Returns true if the proxy's backing surface may be returned to the free
// pool when this interval expires.
bool GrResourceAllocator::Interval::isSurfaceRecyclable() const {
    // All the refs on the proxy are known to the resource allocator thus no one
    // should be holding onto it outside of Ganesh.
    return !fProxy->refCntGreaterThan(fUses);
}
98
Robert Phillips5af44de2017-07-18 14:49:38 -040099GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400100 SkDEBUGCODE(this->validate());
101
Robert Phillips5af44de2017-07-18 14:49:38 -0400102 Interval* temp = fHead;
103 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500104 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400105 if (!fHead) {
106 fTail = nullptr;
107 }
108 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400109 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400110
111 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400112 return temp;
113}
114
115// TODO: fuse this with insertByIncreasingEnd
116void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400117 SkDEBUGCODE(this->validate());
118 SkASSERT(!intvl->next());
119
Robert Phillips5af44de2017-07-18 14:49:38 -0400120 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400121 // 14%
122 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500123 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400124 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500125 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400126 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400127 } else if (fTail->start() <= intvl->start()) {
128 // 83%
129 fTail->setNext(intvl);
130 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400131 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400132 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400133 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500134 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400135 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400136 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400137
138 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500139 intvl->setNext(next);
140 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400141 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400142
143 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400144}
145
// TODO: fuse this with insertByIncreasingStart
// Inserts 'intvl' keeping the list sorted by increasing end(). The percentage
// comments record observed branch frequencies. Branch order matters for ties
// between equal end() values (head insertion wins over tail insertion).
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}
176
#ifdef SK_DEBUG
// Debug-only invariant check: fHead and fTail are either both null or both
// set, and fTail is the last node reachable from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    Interval* node = fHead;
    while (node) {
        last = node;
        node = node->next();
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500188
Robert Phillips5af44de2017-07-18 14:49:38 -0400189// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400190void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400191 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
192
193 if (!key.isValid()) {
194 return; // can't do it w/o a valid scratch key
195 }
196
Robert Phillipsf8e25022017-11-08 15:24:31 -0500197 if (surface->getUniqueKey().isValid()) {
198 // If the surface has a unique key we throw it back into the resource cache.
199 // If things get really tight 'findSurfaceFor' may pull it back out but there is
200 // no need to have it in tight rotation.
201 return;
202 }
203
Robert Phillips715d08c2018-07-18 13:56:48 -0400204#if GR_ALLOCATION_SPEW
205 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
206#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400207 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500208 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400209}
210
211// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
212// If we can't find a useable one, create a new one.
Chris Dalton0b68dda2019-11-07 21:08:03 -0700213sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400214 if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
215 // First try to reattach to a cached version if the proxy is uniquely keyed
Chris Dalton0b68dda2019-11-07 21:08:03 -0700216 if (sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
217 proxy->asTextureProxy()->getUniqueKey())) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400218 return surface;
219 }
220 }
221
Robert Phillips57aa3672017-07-21 11:38:13 -0400222 // First look in the free pool
223 GrScratchKey key;
Robert Phillips5af44de2017-07-18 14:49:38 -0400224
Greg Danield51fa2f2020-01-22 16:53:38 -0500225 proxy->priv().computeScratchKey(*fResourceProvider->caps(), &key);
Robert Phillips57aa3672017-07-21 11:38:13 -0400226
Robert Phillips10d17212019-04-24 14:09:10 -0400227 auto filter = [] (const GrSurface* s) {
228 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500229 };
230 sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
Robert Phillips57aa3672017-07-21 11:38:13 -0400231 if (surface) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500232 if (SkBudgeted::kYes == proxy->isBudgeted() &&
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500233 GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500234 // This gets the job done but isn't quite correct. It would be better to try to
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500235 // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
Robert Phillipsf8e25022017-11-08 15:24:31 -0500236 surface->resourcePriv().makeBudgeted();
237 }
Robert Phillips0790f8a2018-09-18 13:11:03 -0400238 SkASSERT(!surface->getUniqueKey().isValid());
Robert Phillipsf8e25022017-11-08 15:24:31 -0500239 return surface;
Robert Phillips57aa3672017-07-21 11:38:13 -0400240 }
241
242 // Failing that, try to grab a new one from the resource cache
Robert Phillips5af44de2017-07-18 14:49:38 -0400243 return proxy->priv().createSurface(fResourceProvider);
244}
245
246// Remove any intervals that end before the current index. Return their GrSurfaces
Robert Phillips39667382019-04-17 16:03:30 -0400247// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400248void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500249 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400250 Interval* intvl = fActiveIntvls.popHead();
251 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500252
Adlai Holler729ba5e2021-03-15 12:34:31 -0400253 if (GrSurface* surf = intvl->proxy()->peekSurface()) {
Adlai Holler1143b1b2021-03-16 13:07:40 -0400254 if (intvl->isSurfaceRecyclable()) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400255 this->recycleSurface(sk_ref_sp(surf));
Robert Phillips715d08c2018-07-18 13:56:48 -0400256 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500257 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400258 }
259}
260
// Walks the intervals in increasing-start order, instantiating each proxy
// (reusing expired surfaces where possible) and recycling surfaces as their
// intervals expire. Returns false if any instantiation failed, either here or
// earlier in addInterval().
bool GrResourceAllocator::assign() {
    fIntvlHash.reset(); // we don't need the interval hash anymore

    SkDEBUGCODE(fAssigned = true;)

    if (fIntvlList.empty()) {
        return !fFailedInstantiation; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    while (Interval* cur = fIntvlList.popHead()) {
        // Retire every interval that ends before this one starts so its
        // surface becomes available for reuse below.
        this->expire(cur->start());

        if (cur->proxy()->isInstantiated()) {
            fActiveIntvls.insertByIncreasingEnd(cur);

            continue;
        }

        if (cur->proxy()->isLazy()) {
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                fFailedInstantiation = true;
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy())) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                // Propagate the proxy's unique key onto the surface so future
                // cache lookups find it.
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            SkASSERT(!cur->proxy()->peekSurface());
            cur->proxy()->priv().assign(std::move(surface));
        } else {
            // No cached, pooled, or newly-created surface was available.
            SkASSERT(!cur->proxy()->isInstantiated());
            fFailedInstantiation = true;
        }

        // Failed or not, the interval joins the active list so expire() keeps
        // the bookkeeping consistent.
        fActiveIntvls.insertByIncreasingEnd(cur);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return !fFailedInstantiation;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400320
#if GR_ALLOCATION_SPEW
// Debug-only: prints every pending interval (proxy ID, backing surface ID,
// range, ref counts) followed by an ASCII chart of interval overlap.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals: one row per interval, an 'x' in
    // each column (op index) the interval covers.
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif