blob: 5106a888916665c8bdd6edebb2434e371ee28ffb [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Adlai Hollerca1137b2021-04-08 11:39:55 -040010#include "src/gpu/GrDirectContextPriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040012#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040013#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040015#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050016#include "src/gpu/GrSurfaceProxyPriv.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040017
Adlai Holler4cfbe532021-03-17 10:36:39 -040018#ifdef SK_DEBUG
19#include <atomic>
Mike Klein0ec1c572018-12-04 11:52:51 -050020
Adlai Holler4cfbe532021-03-17 10:36:39 -040021uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
22 static std::atomic<uint32_t> nextID{1};
23 uint32_t id;
24 do {
25 id = nextID.fetch_add(1, std::memory_order_relaxed);
26 } while (id == SK_InvalidUniqueID);
27 return id;
28}
29
30uint32_t GrResourceAllocator::Register::CreateUniqueID() {
31 static std::atomic<uint32_t> nextID{1};
32 uint32_t id;
33 do {
34 id = nextID.fetch_add(1, std::memory_order_relaxed);
35 } while (id == SK_InvalidUniqueID);
36 return id;
37}
Robert Phillipsda1be462018-07-27 07:18:06 -040038#endif
39
Robert Phillips5b65a842017-11-13 15:48:12 -050040GrResourceAllocator::~GrResourceAllocator() {
Robert Phillips5b65a842017-11-13 15:48:12 -050041 SkASSERT(fIntvlList.empty());
42 SkASSERT(fActiveIntvls.empty());
43 SkASSERT(!fIntvlHash.count());
Robert Phillips5b65a842017-11-13 15:48:12 -050044}
45
Adlai Holler7f7a5df2021-02-09 17:41:10 +000046void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
47 ActualUse actualUse
Chris Dalton8816b932017-11-29 16:48:25 -070048 SkDEBUGCODE(, bool isDirectDstRead)) {
Brian Salomonbeb7f522019-08-30 16:19:42 -040049 SkASSERT(start <= end);
50 SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment
Robert Phillips5f78adf2019-04-22 12:41:39 -040051
Chris Dalton97155592019-06-13 13:40:20 -060052 if (proxy->canSkipResourceAllocator()) {
Robert Phillips5f78adf2019-04-22 12:41:39 -040053 return;
54 }
55
Brian Salomon9cadc312018-12-05 15:09:19 -050056 // If a proxy is read only it must refer to a texture with specific content that cannot be
57 // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
58 // with the same texture.
59 if (proxy->readOnly()) {
Adlai Hollerca1137b2021-04-08 11:39:55 -040060 auto resourceProvider = fDContext->priv().resourceProvider();
61 if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(resourceProvider)) {
Adlai Holler19fd5142021-03-08 10:19:30 -070062 fFailedInstantiation = true;
Brian Salomon9cadc312018-12-05 15:09:19 -050063 } else {
Brian Salomonbeb7f522019-08-30 16:19:42 -040064 // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
65 // must already be instantiated or it must be a lazy proxy that we instantiated above.
66 SkASSERT(proxy->isInstantiated());
Brian Salomon9cadc312018-12-05 15:09:19 -050067 }
Brian Salomonbeb7f522019-08-30 16:19:42 -040068 return;
69 }
Adlai Holler539db2f2021-03-16 09:45:05 -040070 uint32_t proxyID = proxy->uniqueID().asUInt();
71 if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
Brian Salomonbeb7f522019-08-30 16:19:42 -040072 // Revise the interval for an existing use
Adlai Holler1143b1b2021-03-16 13:07:40 -040073 Interval* intvl = *intvlPtr;
Brian Salomonbeb7f522019-08-30 16:19:42 -040074#ifdef SK_DEBUG
Adlai Holler9e2c50e2021-02-09 14:41:52 -050075 if (0 == start && 0 == end) {
76 // This interval is for the initial upload to a deferred proxy. Due to the vagaries
77 // of how deferred proxies are collected they can appear as uploads multiple times
78 // in a single opsTasks' list and as uploads in several opsTasks.
79 SkASSERT(0 == intvl->start());
80 } else if (isDirectDstRead) {
Brian Salomonbeb7f522019-08-30 16:19:42 -040081 // Direct reads from the render target itself should occur w/in the existing
82 // interval
83 SkASSERT(intvl->start() <= start && intvl->end() >= end);
84 } else {
85 SkASSERT(intvl->end() <= start && intvl->end() <= end);
86 }
87#endif
Adlai Holler7f7a5df2021-02-09 17:41:10 +000088 if (ActualUse::kYes == actualUse) {
89 intvl->addUse();
90 }
Brian Salomonbeb7f522019-08-30 16:19:42 -040091 intvl->extendEnd(end);
92 return;
93 }
Adlai Holler4cfbe532021-03-17 10:36:39 -040094 Interval* newIntvl = fInternalAllocator.make<Interval>(proxy, start, end);
Brian Salomonc6093532018-12-05 21:34:36 +000095
Adlai Holler7f7a5df2021-02-09 17:41:10 +000096 if (ActualUse::kYes == actualUse) {
97 newIntvl->addUse();
98 }
Brian Salomonbeb7f522019-08-30 16:19:42 -040099 fIntvlList.insertByIncreasingStart(newIntvl);
Adlai Holler539db2f2021-03-16 09:45:05 -0400100 fIntvlHash.set(proxyID, newIntvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400101}
102
Adlai Holler3cffe812021-04-09 13:43:32 -0400103GrResourceAllocator::Register::Register(GrSurfaceProxy* originatingProxy,
104 GrScratchKey scratchKey)
105 : fOriginatingProxy(originatingProxy)
106 , fScratchKey(std::move(scratchKey)) {
107 SkASSERT(originatingProxy);
108 SkASSERT(!originatingProxy->isInstantiated());
109 SkASSERT(!originatingProxy->isLazy());
110 SkDEBUGCODE(fUniqueID = CreateUniqueID();)
111}
112
Adlai Holler7df8d222021-03-19 12:27:49 -0400113bool GrResourceAllocator::Register::isRecyclable(const GrCaps& caps,
114 GrSurfaceProxy* proxy,
115 int knownUseCount) const {
116 if (!caps.reuseScratchTextures() && !proxy->asRenderTargetProxy()) {
117 // Tragically, scratch texture reuse is totally disabled in this case.
118 return false;
119 }
120
121 if (!this->scratchKey().isValid()) {
122 return false; // no scratch key, no free pool
123 }
124 if (this->uniqueKey().isValid()) {
125 return false; // rely on the resource cache to hold onto uniquely-keyed surfaces.
126 }
127 // If all the refs on the proxy are known to the resource allocator then no one
Adlai Holler1143b1b2021-03-16 13:07:40 -0400128 // should be holding onto it outside of Ganesh.
Adlai Holler7df8d222021-03-19 12:27:49 -0400129 return !proxy->refCntGreaterThan(knownUseCount);
130}
131
132bool GrResourceAllocator::Register::instantiateSurface(GrSurfaceProxy* proxy,
133 GrResourceProvider* resourceProvider) {
134 SkASSERT(!proxy->peekSurface());
135
136 sk_sp<GrSurface> surface;
137 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
138 SkASSERT(uniqueKey == fOriginatingProxy->getUniqueKey());
139 // First try to reattach to a cached surface if the proxy is uniquely keyed
140 surface = resourceProvider->findByUniqueKey<GrSurface>(uniqueKey);
141 }
142 if (!surface) {
143 if (proxy == fOriginatingProxy) {
144 surface = proxy->priv().createSurface(resourceProvider);
145 } else {
146 surface = sk_ref_sp(fOriginatingProxy->peekSurface());
147 }
148 }
149 if (!surface) {
150 return false;
151 }
152
153 // Make surface budgeted if this proxy is budgeted.
154 if (SkBudgeted::kYes == proxy->isBudgeted() &&
155 GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
156 // This gets the job done but isn't quite correct. It would be better to try to
157 // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
158 surface->resourcePriv().makeBudgeted();
159 }
160
161 // Propagate the proxy unique key to the surface if we have one.
162 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
163 if (!surface->getUniqueKey().isValid()) {
164 resourceProvider->assignUniqueKeyToResource(uniqueKey, surface.get());
165 }
166 SkASSERT(surface->getUniqueKey() == uniqueKey);
167 }
168 proxy->priv().assign(std::move(surface));
169 return true;
Adlai Holler1143b1b2021-03-16 13:07:40 -0400170}
171
Robert Phillips5af44de2017-07-18 14:49:38 -0400172GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400173 SkDEBUGCODE(this->validate());
174
Robert Phillips5af44de2017-07-18 14:49:38 -0400175 Interval* temp = fHead;
176 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500177 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400178 if (!fHead) {
179 fTail = nullptr;
180 }
181 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400182 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400183
184 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400185 return temp;
186}
187
188// TODO: fuse this with insertByIncreasingEnd
189void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400190 SkDEBUGCODE(this->validate());
191 SkASSERT(!intvl->next());
192
Robert Phillips5af44de2017-07-18 14:49:38 -0400193 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400194 // 14%
195 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500196 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400197 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500198 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400199 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400200 } else if (fTail->start() <= intvl->start()) {
201 // 83%
202 fTail->setNext(intvl);
203 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400204 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400205 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400206 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500207 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400208 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400209 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400210
211 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500212 intvl->setNext(next);
213 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400214 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400215
216 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400217}
218
219// TODO: fuse this with insertByIncreasingStart
220void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400221 SkDEBUGCODE(this->validate());
222 SkASSERT(!intvl->next());
223
Robert Phillips5af44de2017-07-18 14:49:38 -0400224 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400225 // 14%
226 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500227 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400228 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500229 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400230 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400231 } else if (fTail->end() <= intvl->end()) {
232 // 3%
233 fTail->setNext(intvl);
234 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400235 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400236 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400237 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500238 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400239 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400240 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400241
242 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500243 intvl->setNext(next);
244 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400245 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400246
247 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400248}
249
#ifdef SK_DEBUG
// Sanity-check the list: head/tail nullness must agree, and walking the
// links from the head must terminate exactly at fTail.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    for (Interval* cur = fHead; cur; last = cur, cur = cur->next()) {
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500261
Adlai Holler4cfbe532021-03-17 10:36:39 -0400262// First try to reuse one of the recently allocated/used registers in the free pool.
Adlai Holler7df8d222021-03-19 12:27:49 -0400263GrResourceAllocator::Register* GrResourceAllocator::findOrCreateRegisterFor(GrSurfaceProxy* proxy) {
264 // Handle uniquely keyed proxies
Adlai Hollercc119d92021-03-16 15:17:25 -0400265 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
Adlai Holler7df8d222021-03-19 12:27:49 -0400266 if (auto p = fUniqueKeyRegisters.find(uniqueKey)) {
267 return *p;
Robert Phillips0790f8a2018-09-18 13:11:03 -0400268 }
Adlai Holler7df8d222021-03-19 12:27:49 -0400269 // No need for a scratch key. These don't go in the free pool.
270 Register* r = fInternalAllocator.make<Register>(proxy, GrScratchKey());
271 fUniqueKeyRegisters.set(uniqueKey, r);
272 return r;
Robert Phillips0790f8a2018-09-18 13:11:03 -0400273 }
274
Adlai Hollercc119d92021-03-16 15:17:25 -0400275 // Then look in the free pool
Adlai Holler7df8d222021-03-19 12:27:49 -0400276 GrScratchKey scratchKey;
Adlai Hollerca1137b2021-04-08 11:39:55 -0400277 proxy->priv().computeScratchKey(*fDContext->priv().caps(), &scratchKey);
Robert Phillips57aa3672017-07-21 11:38:13 -0400278
Adlai Holler4cfbe532021-03-17 10:36:39 -0400279 auto filter = [] (const Register* r) {
Robert Phillips10d17212019-04-24 14:09:10 -0400280 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500281 };
Adlai Holler7df8d222021-03-19 12:27:49 -0400282 if (Register* r = fFreePool.findAndRemove(scratchKey, filter)) {
Adlai Holler4cfbe532021-03-17 10:36:39 -0400283 return r;
Robert Phillips57aa3672017-07-21 11:38:13 -0400284 }
285
Adlai Holler7df8d222021-03-19 12:27:49 -0400286 return fInternalAllocator.make<Register>(proxy, std::move(scratchKey));
Robert Phillips5af44de2017-07-18 14:49:38 -0400287}
288
Adlai Holler7df8d222021-03-19 12:27:49 -0400289// Remove any intervals that end before the current index. Add their registers
Robert Phillips39667382019-04-17 16:03:30 -0400290// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400291void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500292 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400293 Interval* intvl = fActiveIntvls.popHead();
294 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500295
Adlai Holler7df8d222021-03-19 12:27:49 -0400296 Register* r = intvl->getRegister();
Adlai Hollerca1137b2021-04-08 11:39:55 -0400297 if (r && r->isRecyclable(*fDContext->priv().caps(), intvl->proxy(), intvl->uses())) {
Adlai Holler7df8d222021-03-19 12:27:49 -0400298#if GR_ALLOCATION_SPEW
299 SkDebugf("putting register %d back into pool\n", r->uniqueID());
300#endif
301 // TODO: fix this insertion so we get a more LRU-ish behavior
302 fFreePool.insert(r->scratchKey(), r);
Robert Phillips5b65a842017-11-13 15:48:12 -0500303 }
Adlai Holler7df8d222021-03-19 12:27:49 -0400304 fFinishedIntvls.insertByIncreasingStart(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400305 }
306}
307
Adlai Holler19fd5142021-03-08 10:19:30 -0700308bool GrResourceAllocator::assign() {
Robert Phillips5f78adf2019-04-22 12:41:39 -0400309 fIntvlHash.reset(); // we don't need the interval hash anymore
310
Robert Phillips5af44de2017-07-18 14:49:38 -0400311 SkDEBUGCODE(fAssigned = true;)
312
Adlai Hollerc616e1c2021-02-11 15:18:17 -0500313 if (fIntvlList.empty()) {
Adlai Holler19fd5142021-03-08 10:19:30 -0700314 return !fFailedInstantiation; // no resources to assign
Adlai Hollerc616e1c2021-02-11 15:18:17 -0500315 }
316
Robert Phillips715d08c2018-07-18 13:56:48 -0400317#if GR_ALLOCATION_SPEW
Adlai Hollerc616e1c2021-02-11 15:18:17 -0500318 SkDebugf("assigning %d ops\n", fNumOps);
Robert Phillips715d08c2018-07-18 13:56:48 -0400319 this->dumpIntervals();
320#endif
Robert Phillipseafd48a2017-11-16 07:52:08 -0500321
Adlai Holler1143b1b2021-03-16 13:07:40 -0400322 while (Interval* cur = fIntvlList.popHead()) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500323 this->expire(cur->start());
Robert Phillips57aa3672017-07-21 11:38:13 -0400324
Adlai Holler7df8d222021-03-19 12:27:49 -0400325 // Already-instantiated proxies and lazy proxies don't use registers.
326 // No need to compute scratch keys (or CANT, in the case of fully-lazy).
327 if (cur->proxy()->isInstantiated() || cur->proxy()->isLazy()) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400328 fActiveIntvls.insertByIncreasingEnd(cur);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500329
Robert Phillips57aa3672017-07-21 11:38:13 -0400330 continue;
331 }
332
Adlai Holler7df8d222021-03-19 12:27:49 -0400333 Register* r = this->findOrCreateRegisterFor(cur->proxy());
Robert Phillips715d08c2018-07-18 13:56:48 -0400334#if GR_ALLOCATION_SPEW
Adlai Holler7df8d222021-03-19 12:27:49 -0400335 SkDebugf("Assigning register %d to %d\n",
336 r->uniqueID(),
337 cur->proxy()->uniqueID().asUInt());
Robert Phillips715d08c2018-07-18 13:56:48 -0400338#endif
Adlai Holler7df8d222021-03-19 12:27:49 -0400339 SkASSERT(!cur->proxy()->peekSurface());
340 cur->setRegister(r);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500341
Robert Phillips5af44de2017-07-18 14:49:38 -0400342 fActiveIntvls.insertByIncreasingEnd(cur);
343 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500344
345 // expire all the remaining intervals to drain the active interval list
346 this->expire(std::numeric_limits<unsigned int>::max());
Adlai Holler7df8d222021-03-19 12:27:49 -0400347
348 // TODO: Return here and give the caller a chance to estimate memory cost and bail before
349 // instantiating anything.
350
351 // Instantiate surfaces
Adlai Hollerca1137b2021-04-08 11:39:55 -0400352 auto resourceProvider = fDContext->priv().resourceProvider();
Adlai Holler7df8d222021-03-19 12:27:49 -0400353 while (Interval* cur = fFinishedIntvls.popHead()) {
354 if (fFailedInstantiation) {
355 break;
356 }
357 if (cur->proxy()->isInstantiated()) {
358 continue;
359 }
360 if (cur->proxy()->isLazy()) {
Adlai Hollerca1137b2021-04-08 11:39:55 -0400361 fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(resourceProvider);
Adlai Holler7df8d222021-03-19 12:27:49 -0400362 continue;
363 }
364 Register* r = cur->getRegister();
365 SkASSERT(r);
Adlai Hollerca1137b2021-04-08 11:39:55 -0400366 fFailedInstantiation = !r->instantiateSurface(cur->proxy(), resourceProvider);
Adlai Holler7df8d222021-03-19 12:27:49 -0400367 }
Adlai Holler19fd5142021-03-08 10:19:30 -0700368 return !fFailedInstantiation;
Robert Phillips5af44de2017-07-18 14:49:38 -0400369}
Robert Phillips715d08c2018-07-18 13:56:48 -0400370
#if GR_ALLOCATION_SPEW
// Debug aid: print every interval, then draw its usage span as a row of 'x's.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their overall [min, max] range.
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals.
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            SkDebugf((i >= cur->start() && i <= cur->end()) ? "x" : " ");
        }
        SkDebugf("\n");
    }
}
#endif