blob: f271f4e74d3b153235824bd4fd32349ea6d8fc05 [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040011#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040014#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrSurfaceProxyPriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040016#include "src/gpu/GrTextureProxy.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040017
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Hands out process-unique IDs for debugging interval creation. Skips
    // SK_InvalidUniqueID so every returned value is a valid ID.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        for (;;) {
            uint32_t id = nextID.fetch_add(1, std::memory_order_relaxed);
            if (id != SK_InvalidUniqueID) {
                return id;
            }
        }
    }
#endif
30
Robert Phillipsc73666f2019-04-24 08:49:48 -040031void GrResourceAllocator::determineRecyclability() {
32 for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
33 if (cur->proxy()->canSkipResourceAllocator()) {
34 // These types of proxies can slip in here if they require a stencil buffer
35 continue;
36 }
37
Brian Salomon557e8122019-10-24 10:37:08 -040038 if (!cur->proxy()->refCntGreaterThan(cur->uses())) {
Robert Phillipsc73666f2019-04-24 08:49:48 -040039 // All the refs on the proxy are known to the resource allocator thus no one
40 // should be holding onto it outside of Ganesh.
Robert Phillipsc73666f2019-04-24 08:49:48 -040041 cur->markAsRecyclable();
42 }
43 }
44}
45
// The allocator must be fully drained before destruction: assign() consumes the
// pending interval list, expires the active list, and resets the hash.
GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());     // no un-assigned intervals remain
    SkASSERT(fActiveIntvls.empty());  // expire() drained the active list
    SkASSERT(!fIntvlHash.count());    // proxy->interval hash was reset in assign()
}
51
// Registers that 'proxy' is used over the op-index range [start, end]. If the proxy
// already has an interval, the existing interval is revised (its end extended);
// otherwise a new interval is created and inserted. 'actualUse' distinguishes real
// uses (counted toward recyclability in determineRecyclability()) from
// bookkeeping-only registrations. Read-only proxies get no interval at all.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Lazy instantiation failed; assign() will report the failure.
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // it must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        Interval* intvl = *intvlPtr;
        // Revise the interval for an existing use
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Normal case: new uses arrive in monotonically non-decreasing op order.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First use of this proxy: allocate a fresh interval from the arena.
    Interval* newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
107
108GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400109 SkDEBUGCODE(this->validate());
110
Robert Phillips5af44de2017-07-18 14:49:38 -0400111 Interval* temp = fHead;
112 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500113 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400114 if (!fHead) {
115 fTail = nullptr;
116 }
117 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400118 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400119
120 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400121 return temp;
122}
123
124// TODO: fuse this with insertByIncreasingEnd
125void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400126 SkDEBUGCODE(this->validate());
127 SkASSERT(!intvl->next());
128
Robert Phillips5af44de2017-07-18 14:49:38 -0400129 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400130 // 14%
131 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500132 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400133 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500134 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400135 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400136 } else if (fTail->start() <= intvl->start()) {
137 // 83%
138 fTail->setNext(intvl);
139 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400140 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400141 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400142 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500143 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400144 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400145 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400146
147 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500148 intvl->setNext(next);
149 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400150 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400151
152 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400153}
154
155// TODO: fuse this with insertByIncreasingStart
156void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400157 SkDEBUGCODE(this->validate());
158 SkASSERT(!intvl->next());
159
Robert Phillips5af44de2017-07-18 14:49:38 -0400160 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400161 // 14%
162 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500163 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400164 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500165 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400166 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400167 } else if (fTail->end() <= intvl->end()) {
168 // 3%
169 fTail->setNext(intvl);
170 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400171 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400172 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400173 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500174 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400175 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400176 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400177
178 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500179 intvl->setNext(next);
180 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400181 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400182
183 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400184}
185
#ifdef SK_DEBUG
// Checks list invariants: fHead and fTail are both null or both non-null, and
// fTail is the last node reachable from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    Interval* node = fHead;
    while (node) {
        last = node;
        node = node->next();
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500197
198 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
199 Interval* tmp = fHead;
200 fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400201 fTail = nullptr;
Robert Phillips4150eea2018-02-07 17:08:21 -0500202 return tmp;
203}
204
Robert Phillips5af44de2017-07-18 14:49:38 -0400205// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400206void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400207 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
208
209 if (!key.isValid()) {
210 return; // can't do it w/o a valid scratch key
211 }
212
Robert Phillipsf8e25022017-11-08 15:24:31 -0500213 if (surface->getUniqueKey().isValid()) {
214 // If the surface has a unique key we throw it back into the resource cache.
215 // If things get really tight 'findSurfaceFor' may pull it back out but there is
216 // no need to have it in tight rotation.
217 return;
218 }
219
Robert Phillips715d08c2018-07-18 13:56:48 -0400220#if GR_ALLOCATION_SPEW
221 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
222#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400223 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500224 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400225}
226
227// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
228// If we can't find a useable one, create a new one.
Chris Dalton0b68dda2019-11-07 21:08:03 -0700229sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400230 if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
231 // First try to reattach to a cached version if the proxy is uniquely keyed
Chris Dalton0b68dda2019-11-07 21:08:03 -0700232 if (sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
233 proxy->asTextureProxy()->getUniqueKey())) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400234 return surface;
235 }
236 }
237
Robert Phillips57aa3672017-07-21 11:38:13 -0400238 // First look in the free pool
239 GrScratchKey key;
Robert Phillips5af44de2017-07-18 14:49:38 -0400240
Greg Danield51fa2f2020-01-22 16:53:38 -0500241 proxy->priv().computeScratchKey(*fResourceProvider->caps(), &key);
Robert Phillips57aa3672017-07-21 11:38:13 -0400242
Robert Phillips10d17212019-04-24 14:09:10 -0400243 auto filter = [] (const GrSurface* s) {
244 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500245 };
246 sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
Robert Phillips57aa3672017-07-21 11:38:13 -0400247 if (surface) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500248 if (SkBudgeted::kYes == proxy->isBudgeted() &&
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500249 GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500250 // This gets the job done but isn't quite correct. It would be better to try to
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500251 // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
Robert Phillipsf8e25022017-11-08 15:24:31 -0500252 surface->resourcePriv().makeBudgeted();
253 }
Robert Phillips0790f8a2018-09-18 13:11:03 -0400254 SkASSERT(!surface->getUniqueKey().isValid());
Robert Phillipsf8e25022017-11-08 15:24:31 -0500255 return surface;
Robert Phillips57aa3672017-07-21 11:38:13 -0400256 }
257
258 // Failing that, try to grab a new one from the resource cache
Robert Phillips5af44de2017-07-18 14:49:38 -0400259 return proxy->priv().createSurface(fResourceProvider);
260}
261
262// Remove any intervals that end before the current index. Return their GrSurfaces
Robert Phillips39667382019-04-17 16:03:30 -0400263// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400264void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500265 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400266 Interval* intvl = fActiveIntvls.popHead();
267 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500268
Adlai Holler729ba5e2021-03-15 12:34:31 -0400269 if (GrSurface* surf = intvl->proxy()->peekSurface()) {
270 if (intvl->isRecyclable()) {
271 this->recycleSurface(sk_ref_sp(surf));
Robert Phillips715d08c2018-07-18 13:56:48 -0400272 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500273 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400274 }
275}
276
// Walks the pending intervals in increasing-start order, instantiating each proxy
// (lazily, from the free pool, or fresh from the resource cache) and expiring
// intervals whose surfaces can be returned to the pool for reuse.
// Returns true iff every instantiation (including earlier lazy ones recorded in
// fFailedInstantiation) succeeded.
bool GrResourceAllocator::assign() {
    fIntvlHash.reset(); // we don't need the interval hash anymore

    SkDEBUGCODE(fAssigned = true;)

    if (fIntvlList.empty()) {
        return !fFailedInstantiation; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    // TODO: Can this be done inline during the main iteration?
    this->determineRecyclability();

    Interval* cur = nullptr;
    while ((cur = fIntvlList.popHead())) {
        // Free up any surfaces whose intervals end before this one starts.
        this->expire(cur->start());

        if (cur->proxy()->isInstantiated()) {
            // Already backed by a surface — just track its lifetime.
            fActiveIntvls.insertByIncreasingEnd(cur);

            continue;
        }

        if (cur->proxy()->isLazy()) {
            // Lazy proxies instantiate themselves; we only record failure.
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                fFailedInstantiation = true;
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy())) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                // Propagate the proxy's unique key onto the surface if it lacks one.
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            SkASSERT(!cur->proxy()->peekSurface());
            cur->proxy()->priv().assign(std::move(surface));
        } else {
            // No surface could be found or created.
            SkASSERT(!cur->proxy()->isInstantiated());
            fFailedInstantiation = true;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return !fFailedInstantiation;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400340
#if GR_ALLOCATION_SPEW
// Debug aid: prints every interval (proxy ID, backing surface ID, range, ref counts)
// followed by an ASCII timeline graph of the usage intervals.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        // 'x' marks op indices where this interval is live.
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif