/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrResourceAllocator.h"

#include "GrDeinstantiateProxyTracker.h"
#include "GrGpuResourcePriv.h"
#include "GrOpList.h"
#include "GrRenderTargetProxy.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrSurfacePriv.h"
#include "GrSurfaceProxy.h"
#include "GrSurfaceProxyPriv.h"
#include "GrTextureProxy.h"

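// When interval creation tracking is enabled, each Interval is tagged with a unique,
// non-zero ID so individual intervals can be identified while debugging.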
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        uint32_t id;
        do {
            id = nextID++;
        } while (id == SK_InvalidUniqueID);
        return id;
    }
#endif

void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);
    fAssignedSurface = s;
    fProxy->priv().assign(std::move(s));
}

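// Mark an interval as recyclable when every known ref on its proxy belongs to the resource
// allocator itself; the surface backing such a proxy can be returned to the free pool as
// soon as the interval expires. Proxies that can skip the resource allocator are ignored.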
void GrResourceAllocator::determineRecyclability() {
    for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        if (cur->proxy()->canSkipResourceAllocator()) {
            // These types of proxies can slip in here if they require a stencil buffer
            continue;
        }

        if (cur->uses() >= cur->proxy()->priv().getTotalRefs()) {
            // All the refs on the proxy are known to the resource allocator thus no one
            // should be holding onto it outside of Ganesh.
            SkASSERT(cur->uses() == cur->proxy()->priv().getTotalRefs());
            cur->markAsRecyclable();
        }
    }
}

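// Record the first op index of the next opList so assign() can later tell where each
// opList's ops end when deciding whether an intermediate flush lands on an opList boundary.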
void GrResourceAllocator::markEndOfOpList(int opListIndex) {
    SkASSERT(!fAssigned);   // We shouldn't be adding any opLists after (or during) assignment

    SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
    if (!fEndOfOpListOpIndices.empty()) {
        SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
    }

    fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
    SkASSERT(fEndOfOpListOpIndices.count() <= fNumOpLists);
}

GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}

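// Add (or extend) the usage interval for 'proxy' covering the op range [start, end].
// Read-only proxies are instantiated immediately instead of receiving an interval, and
// proxies that can skip the resource allocator are only tracked when they will need a
// stencil buffer attached during assign().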
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {

    bool needsStencil = proxy->asRenderTargetProxy()
                                        ? proxy->asRenderTargetProxy()->needsStencil()
                                        : false;

    // If we're going to need to add a stencil buffer in assign, we
    // need to add at least a symbolic interval
    // TODO: adding this interval just to add a stencil buffer is
    // a bit heavyweight. Is there a simpler way to accomplish this?
    if (!needsStencil && proxy->canSkipResourceAllocator()) {
        return;
    }

    SkASSERT(!proxy->priv().ignoredByResourceAllocator());

    SkASSERT(start <= end);
    SkASSERT(!fAssigned);   // We shouldn't be adding any intervals after (or during) assignment

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        // Since we aren't going to add an interval we won't revisit this proxy in assign(). So it
        // must already be instantiated or it must be a lazy proxy that we will instantiate below.
        SkASSERT(proxy->isInstantiated() ||
                 GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState());
    } else {
        if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
            // Revise the interval for an existing use
#ifdef SK_DEBUG
            if (0 == start && 0 == end) {
                // This interval is for the initial upload to a deferred proxy. Due to the vagaries
                // of how deferred proxies are collected they can appear as uploads multiple times
                // in a single opList's list and as uploads in several opLists.
                SkASSERT(0 == intvl->start());
            } else if (isDirectDstRead) {
                // Direct reads from the render target itself should occur w/in the existing
                // interval
                SkASSERT(intvl->start() <= start && intvl->end() >= end);
            } else {
                SkASSERT(intvl->end() <= start && intvl->end() <= end);
            }
#endif
            if (ActualUse::kYes == actualUse) {
                intvl->addUse();
            }
            intvl->extendEnd(end);
            return;
        }
        Interval* newIntvl;
        if (fFreeIntervalList) {
            newIntvl = fFreeIntervalList;
            fFreeIntervalList = newIntvl->next();
            newIntvl->setNext(nullptr);
            newIntvl->resetTo(proxy, start, end);
        } else {
            newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
        }

        if (ActualUse::kYes == actualUse) {
            newIntvl->addUse();
        }
        fIntvlList.insertByIncreasingStart(newIntvl);
        fIntvlHash.add(newIntvl);
    }

    // Because readOnly proxies do not get a usage interval we must instantiate them here (since it
    // won't occur in GrResourceAllocator::assign)
    if (proxy->readOnly()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            if (proxy->priv().doLazyInstantiation(fResourceProvider)) {
                if (proxy->priv().lazyInstantiationType() ==
                    GrSurfaceProxy::LazyInstantiationType::kDeinstantiate) {
                    fDeinstantiateTracker->addProxy(proxy);
                }
            }
        }
    }
}

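// Remove and return the interval at the head of the list (nullptr if the list is empty).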
GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
    SkDEBUGCODE(this->validate());

    Interval* temp = fHead;
    if (temp) {
        fHead = temp->next();
        if (!fHead) {
            fTail = nullptr;
        }
        temp->setNext(nullptr);
    }

    SkDEBUGCODE(this->validate());
    return temp;
}

// TODO: fuse this with insertByIncreasingEnd
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->start() <= fHead->start()) {
        // 3%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->start() <= intvl->start()) {
        // 83%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // almost never
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->start() > next->start(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

// TODO: fuse this with insertByIncreasingStart
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

#ifdef SK_DEBUG
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* prev = nullptr;
    for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) {
    }

    SkASSERT(fTail == prev);
}
#endif

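// Detach the entire list, returning its head and leaving the list empty.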
GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
    Interval* tmp = fHead;
    fHead = nullptr;
    fTail = nullptr;
    return tmp;
}

// 'surface' can be reused. Add it back to the free pool.
void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
    const GrScratchKey &key = surface->resourcePriv().getScratchKey();

    if (!key.isValid()) {
        return; // can't do it w/o a valid scratch key
    }

    if (surface->getUniqueKey().isValid()) {
        // If the surface has a unique key we throw it back into the resource cache.
        // If things get really tight 'findSurfaceFor' may pull it back out but there is
        // no need to have it in tight rotation.
        return;
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
#endif
    // TODO: fix this insertion so we get a more LRU-ish behavior
    fFreePool.insert(key, surface.release());
}

// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a usable one, create a new one.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     bool needsStencil) {

    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                                                        proxy->asTextureProxy()->getUniqueKey());
        if (surface) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                           needsStencil)) {
                return nullptr;
            }

            return surface;
        }
    }

    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    auto filter = [&] (const GrSurface* s) {
        return !proxy->priv().requiresNoPendingIO() || !s->surfacePriv().hasPendingIO();
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       needsStencil)) {
            return nullptr;
        }
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}

// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool if possible.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next());

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

            if (temp->isRecyclable()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}

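// Returns true when the next unassigned interval starts at or after the end of the current
// opList, i.e., when an intermediate flush would land on an opList boundary.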
bool GrResourceAllocator::onOpListBoundary() const {
    if (fIntvlList.empty()) {
        SkASSERT(fCurOpListIndex+1 <= fNumOpLists);
        // Although technically on an opList boundary there is no need to force an
        // intermediate flush here
        return false;
    }

    const Interval* tmp = fIntvlList.peekHead();
    return fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start();
}

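// Arrange for the current flush to stop at the next opList boundary and retire any active
// intervals that would not survive the resulting partial flush.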
void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
    *stopIndex = fCurOpListIndex+1;

    // This is interrupting the allocation of resources for this flush. We need to
    // proactively clear the active interval list of any intervals that aren't
    // guaranteed to survive the partial flush lest they become zombies (i.e.,
    // holding a deleted surface proxy).
    const Interval* tmp = fIntvlList.peekHead();
    SkASSERT(fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start());

    fCurOpListIndex++;
    SkASSERT(fCurOpListIndex < fNumOpLists);

    this->expire(tmp->start());
}

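// Walk the intervals in increasing-start order, expiring finished intervals and assigning a
// GrSurface to each proxy as its interval becomes active. Returns true if any opLists (in
// [*startIndex, *stopIndex)) were processed; proxy instantiation failures are reported
// through 'outError'.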
bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = AssignError::kNoError;

    SkASSERT(fNumOpLists == fEndOfOpListOpIndices.count());

    fIntvlHash.reset(); // we don't need the interval hash anymore

    if (fCurOpListIndex >= fEndOfOpListOpIndices.count()) {
        return false; // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    if (fIntvlList.empty()) {
        fCurOpListIndex = fEndOfOpListOpIndices.count();
        return true; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning opLists %d through %d out of %d numOpLists\n",
             *startIndex, *stopIndex, fNumOpLists);
    SkDebugf("EndOfOpListIndices: ");
    for (int i = 0; i < fEndOfOpListOpIndices.count(); ++i) {
        SkDebugf("%d ", fEndOfOpListOpIndices[i]);
    }
    SkDebugf("\n");
#endif

    SkDEBUGCODE(fAssigned = true;)

#if GR_ALLOCATION_SPEW
    this->dumpIntervals();
#endif
    while (Interval* cur = fIntvlList.popHead()) {
        if (fEndOfOpListOpIndices[fCurOpListIndex] <= cur->start()) {
            fCurOpListIndex++;
            SkASSERT(fCurOpListIndex < fNumOpLists);
        }

        this->expire(cur->start());

        bool needsStencil = cur->proxy()->asRenderTargetProxy()
                                        ? cur->proxy()->asRenderTargetProxy()->needsStencil()
                                        : false;

        if (cur->proxy()->isInstantiated()) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                    fResourceProvider, cur->proxy()->peekSurface(), needsStencil)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate draws on opList boundaries
                if (this->onOpListBoundary()) {
                    this->forceIntermediateFlush(stopIndex);
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            } else {
                if (GrSurfaceProxy::LazyInstantiationType::kDeinstantiate ==
                    cur->proxy()->priv().lazyInstantiationType()) {
                    fDeinstantiateTracker->addProxy(cur->proxy());
                }
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate draws on opList boundaries
            if (this->onOpListBoundary()) {
                this->forceIntermediateFlush(stopIndex);
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}

#if GR_ALLOCATION_SPEW
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d R:%d W:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->getBackingRefCnt_TestOnly(),
                 cur->proxy()->getPendingReadCnt_TestOnly(),
                 cur->proxy()->getPendingWriteCnt_TestOnly());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif