blob: e5767dd67140df15929b7523b3180f2284170b6f [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrResourceAllocator.h"
9
Robert Phillipsf8e25022017-11-08 15:24:31 -050010#include "GrGpuResourcePriv.h"
Robert Phillips5b65a842017-11-13 15:48:12 -050011#include "GrOpList.h"
Robert Phillipseafd48a2017-11-16 07:52:08 -050012#include "GrRenderTargetProxy.h"
13#include "GrResourceCache.h"
Robert Phillipsf8e25022017-11-08 15:24:31 -050014#include "GrResourceProvider.h"
15#include "GrSurfacePriv.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040016#include "GrSurfaceProxy.h"
17#include "GrSurfaceProxyPriv.h"
Robert Phillipsf8e25022017-11-08 15:24:31 -050018#include "GrTextureProxy.h"
Greg Daniel4684f822018-03-08 15:27:36 -050019#include "GrUninstantiateProxyTracker.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040020
#if GR_TRACK_INTERVAL_CREATION
// Debug-only: hand out a unique, non-invalid ID for each Interval created.
uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
    static int32_t gUniqueID = SK_InvalidUniqueID;
    // Keep bumping the shared atomic counter until it yields something other
    // than the reserved invalid ID (only possible on wrap-around).
    for (;;) {
        uint32_t id = static_cast<uint32_t>(sk_atomic_inc(&gUniqueID) + 1);
        if (id != SK_InvalidUniqueID) {
            return id;
        }
    }
}
#endif
31
// Gives this interval (and its proxy) a backing surface. The interval keeps its own
// ref on the surface so it can be recycled when the interval expires.
void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface); // an interval is only ever assigned one surface
    fAssignedSurface = s;                 // deliberate copy: the interval retains one ref...
    fProxy->priv().assign(std::move(s));  // ...and the second ref is moved into the proxy
}
37
Robert Phillipseafd48a2017-11-16 07:52:08 -050038
// Records the boundary of an opList in op-index space. The value pushed is the
// current op index, i.e. the first op index of the *next* opList.
void GrResourceAllocator::markEndOfOpList(int opListIndex) {
    SkASSERT(!fAssigned); // We shouldn't be adding any opLists after (or during) assignment

    SkASSERT(fEndOfOpListOpIndices.count() == opListIndex); // one boundary per closed opList
    if (!fEndOfOpListOpIndices.empty()) {
        // Boundaries must be strictly increasing.
        SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
    }

    fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
}
49
GrResourceAllocator::~GrResourceAllocator() {
    // By destruction time 'assign' should have drained all the interval lists
    // and the hash; anything left here would leak or dangle.
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
55
// Records that 'proxy' is used over the op-index range [start, end]. If the proxy already
// has an interval it is extended; otherwise a new Interval is created (recycling an
// expired one when possible) and registered in both the sorted list and the hash.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment

    if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
        // Revise the interval for an existing use
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times in a
            // single opLists' list and as uploads in several opLists.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // A normal new use must begin at or after the current interval's end.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        intvl->extendEnd(end);
        return;
    }

    // No existing interval: recycle an expired Interval if one is available rather
    // than consuming more arena space.
    Interval* newIntvl;
    if (fFreeIntervalList) {
        newIntvl = fFreeIntervalList;
        fFreeIntervalList = newIntvl->next();
        newIntvl->setNext(nullptr);
        newIntvl->resetTo(proxy, start, end);
    } else {
        newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
    }

    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.add(newIntvl);

    if (!fResourceProvider->explicitlyAllocateGPUResources()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            proxy->priv().doLazyInstantiation(fResourceProvider);
        }
    }
}
100
101GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400102 SkDEBUGCODE(this->validate());
103
Robert Phillips5af44de2017-07-18 14:49:38 -0400104 Interval* temp = fHead;
105 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500106 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400107 if (!fHead) {
108 fTail = nullptr;
109 }
110 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400111 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400112
113 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400114 return temp;
115}
116
// TODO: fuse this with insertByIncreasingEnd
// Inserts 'intvl' keeping the list sorted by increasing start index.
// The percentages below record observed hit rates for each case.
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14% -- the list is empty
        fHead = fTail = intvl;
    } else if (intvl->start() <= fHead->start()) {
        // 3% -- insert at the head
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->start() <= intvl->start()) {
        // 83% -- insert at the tail
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // almost never -- walk to the insertion point in the middle
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->start() > next->start(); prev = next, next = next->next()) {
        }

        SkASSERT(next); // guaranteed: the tail case above would have caught the end
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}
147
// TODO: fuse this with insertByIncreasingStart
// Inserts 'intvl' keeping the list sorted by increasing end index.
// The percentages below record observed hit rates for each case.
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14% -- the list is empty
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64% -- insert at the head
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3% -- insert at the tail
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next); // guaranteed: the tail case above would have caught the end
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}
178
#ifdef SK_DEBUG
// Debug-only consistency check of the singly-linked list invariants.
void GrResourceAllocator::IntervalList::validate() const {
    // Head and tail must be both null (empty list) or both non-null.
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    // Walk the whole list; the last node reached must be the recorded tail.
    Interval* last = nullptr;
    for (Interval* cur = fHead; cur; cur = cur->next()) {
        last = cur;
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500190
191 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
192 Interval* tmp = fHead;
193 fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400194 fTail = nullptr;
Robert Phillips4150eea2018-02-07 17:08:21 -0500195 return tmp;
196}
197
// 'surface' can be reused. Add it back to the free pool.
void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
    const GrScratchKey &key = surface->resourcePriv().getScratchKey();

    if (!key.isValid()) {
        return; // can't do it w/o a valid scratch key
    }

    if (surface->getUniqueKey().isValid()) {
        // If the surface has a unique key we throw it back into the resource cache.
        // If things get really tight 'findSurfaceFor' may pull it back out but there is
        // no need to have it in tight rotation.
        return;
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
#endif
    // TODO: fix this insertion so we get a more LRU-ish behavior
    // Our ref is released into the pool; the pool now owns the surface.
    fFreePool.insert(key, surface.release());
}
219
// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a useable one, create a new one.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     bool needsStencil) {
    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    // Only hand back a pooled surface whose pending-IO state is acceptable to the proxy.
    auto filter = [&] (const GrSurface* s) {
        return !proxy->priv().requiresNoPendingIO() || !s->surfacePriv().hasPendingIO();
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            SkBudgeted::kNo == surface->resourcePriv().isBudgeted()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surface and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        // A pooled surface may lack a stencil buffer the proxy requires; attach one now.
        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       needsStencil)) {
            return nullptr;
        }
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}
251
// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next()); // popHead fully detaches the interval

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

            // If the proxy has an actual live ref on it that means someone wants to retain its
            // contents. In that case we cannot recycle it (until the external holder lets
            // go of it).
            if (0 == temp->proxy()->priv().getProxyRefCnt()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}
276
Greg Daniel4684f822018-03-08 15:27:36 -0500277bool GrResourceAllocator::assign(int* startIndex, int* stopIndex,
278 GrUninstantiateProxyTracker* uninstantiateTracker,
279 AssignError* outError) {
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500280 SkASSERT(outError);
281 *outError = AssignError::kNoError;
282
Robert Phillipseafd48a2017-11-16 07:52:08 -0500283 fIntvlHash.reset(); // we don't need the interval hash anymore
284 if (fIntvlList.empty()) {
285 return false; // nothing to render
286 }
287
288 *startIndex = fCurOpListIndex;
289 *stopIndex = fEndOfOpListOpIndices.count();
290
Robert Phillips4150eea2018-02-07 17:08:21 -0500291 if (!fResourceProvider->explicitlyAllocateGPUResources()) {
292 fIntvlList.detachAll(); // arena allocator will clean these up for us
293 return true;
294 }
295
Robert Phillips5af44de2017-07-18 14:49:38 -0400296 SkDEBUGCODE(fAssigned = true;)
297
Robert Phillips715d08c2018-07-18 13:56:48 -0400298#if GR_ALLOCATION_SPEW
299 this->dumpIntervals();
300#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400301 while (Interval* cur = fIntvlList.popHead()) {
Robert Phillipseafd48a2017-11-16 07:52:08 -0500302 if (fEndOfOpListOpIndices[fCurOpListIndex] < cur->start()) {
303 fCurOpListIndex++;
304 }
305
Robert Phillipsf8e25022017-11-08 15:24:31 -0500306 this->expire(cur->start());
Robert Phillips57aa3672017-07-21 11:38:13 -0400307
Robert Phillipseafd48a2017-11-16 07:52:08 -0500308 bool needsStencil = cur->proxy()->asRenderTargetProxy()
309 ? cur->proxy()->asRenderTargetProxy()->needsStencil()
310 : false;
311
Brian Salomonfd98c2c2018-07-31 17:25:29 -0400312 if (cur->proxy()->isInstantiated()) {
313 if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
314 fResourceProvider, cur->proxy()->peekSurface(), needsStencil)) {
Robert Phillips01a91282018-07-26 08:03:04 -0400315 *outError = AssignError::kFailedProxyInstantiation;
316 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500317
Robert Phillips57aa3672017-07-21 11:38:13 -0400318 fActiveIntvls.insertByIncreasingEnd(cur);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500319
320 if (fResourceProvider->overBudget()) {
321 // Only force intermediate draws on opList boundaries
322 if (!fIntvlList.empty() &&
323 fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
324 *stopIndex = fCurOpListIndex+1;
Robert Phillips1734dd32018-08-21 13:52:09 -0400325
326 // This is interrupting the allocation of resources for this flush. We need to
327 // proactively clear the active interval list of any intervals that aren't
328 // guaranteed to survive the partial flush lest they become zombies (i.e.,
329 // holding a deleted surface proxy).
330 if (const Interval* tmp = fIntvlList.peekHead()) {
331 this->expire(tmp->start());
332 } else {
333 this->expire(std::numeric_limits<unsigned int>::max());
334 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500335 return true;
336 }
337 }
338
Robert Phillips57aa3672017-07-21 11:38:13 -0400339 continue;
340 }
341
Greg Daniel65fa8ca2018-01-10 17:06:31 -0500342 if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500343 if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
344 *outError = AssignError::kFailedProxyInstantiation;
Greg Daniel4684f822018-03-08 15:27:36 -0500345 } else {
346 if (GrSurfaceProxy::LazyInstantiationType::kUninstantiate ==
347 cur->proxy()->priv().lazyInstantiationType()) {
348 uninstantiateTracker->addProxy(cur->proxy());
349 }
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500350 }
Chris Dalton706a6ff2017-11-29 22:01:06 -0700351 } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500352 // TODO: make getUniqueKey virtual on GrSurfaceProxy
353 GrTextureProxy* tex = cur->proxy()->asTextureProxy();
354 if (tex && tex->getUniqueKey().isValid()) {
355 fResourceProvider->assignUniqueKeyToResource(tex->getUniqueKey(), surface.get());
356 SkASSERT(surface->getUniqueKey() == tex->getUniqueKey());
357 }
358
Robert Phillips715d08c2018-07-18 13:56:48 -0400359#if GR_ALLOCATION_SPEW
360 SkDebugf("Assigning %d to %d\n",
361 surface->uniqueID().asUInt(),
362 cur->proxy()->uniqueID().asUInt());
363#endif
364
Robert Phillips5b65a842017-11-13 15:48:12 -0500365 cur->assign(std::move(surface));
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500366 } else {
Brian Salomonfd98c2c2018-07-31 17:25:29 -0400367 SkASSERT(!cur->proxy()->isInstantiated());
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500368 *outError = AssignError::kFailedProxyInstantiation;
Robert Phillips5af44de2017-07-18 14:49:38 -0400369 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500370
Robert Phillips5af44de2017-07-18 14:49:38 -0400371 fActiveIntvls.insertByIncreasingEnd(cur);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500372
373 if (fResourceProvider->overBudget()) {
374 // Only force intermediate draws on opList boundaries
375 if (!fIntvlList.empty() &&
376 fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
377 *stopIndex = fCurOpListIndex+1;
Robert Phillips1734dd32018-08-21 13:52:09 -0400378
379 // This is interrupting the allocation of resources for this flush. We need to
380 // proactively clear the active interval list of any intervals that aren't
381 // guaranteed to survive the partial flush lest they become zombies (i.e.,
382 // holding a deleted surface proxy).
383 if (const Interval* tmp = fIntvlList.peekHead()) {
384 this->expire(tmp->start());
385 } else {
386 this->expire(std::numeric_limits<unsigned int>::max());
387 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500388 return true;
389 }
390 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400391 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500392
393 // expire all the remaining intervals to drain the active interval list
394 this->expire(std::numeric_limits<unsigned int>::max());
Robert Phillipseafd48a2017-11-16 07:52:08 -0500395 return true;
Robert Phillips5af44de2017-07-18 14:49:38 -0400396}
Robert Phillips715d08c2018-07-18 13:56:48 -0400397
#if GR_ALLOCATION_SPEW
// Debug-only: print every pending interval, then draw an ASCII-art graph of the
// op-index ranges each proxy occupies.
void GrResourceAllocator::dumpIntervals() {

    // Print all the intervals while computing their range
    unsigned int min = fNumOps+1;
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d R:%d W:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->getBackingRefCnt_TestOnly(),
                 cur->proxy()->getPendingReadCnt_TestOnly(),
                 cur->proxy()->getPendingWriteCnt_TestOnly());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the useage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        // Mark each op index covered by this interval with an 'x'.
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif