// blob: 0f7089465d420f60f0f8660537e6a174cda26413
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrResourceAllocator.h"

#include "GrDeinstantiateProxyTracker.h"
#include "GrGpuResourcePriv.h"
#include "GrOpList.h"
#include "GrRenderTargetProxy.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrSurfacePriv.h"
#include "GrSurfaceProxy.h"
#include "GrSurfaceProxyPriv.h"
#include "GrTextureProxy.h"

#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Hands out a process-wide unique, nonzero debug ID for each Interval.
    // Thread-safe; skips SK_InvalidUniqueID if the counter ever wraps to it.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        for (;;) {
            uint32_t id = nextID.fetch_add(1);
            if (SK_InvalidUniqueID != id) {
                return id;
            }
        }
    }
#endif
33
// Gives this interval (and its proxy) a backing surface. The interval keeps
// one ref in fAssignedSurface so the surface can be recycled into the free
// pool when the interval expires; the proxy receives the other (moved) ref.
void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);  // an interval is assigned a surface at most once
    fAssignedSurface = s;         // NOTE: copies 's' (ref++) *before* the move below
    fProxy->priv().assign(std::move(s));
}
39
Robert Phillipseafd48a2017-11-16 07:52:08 -050040
41void GrResourceAllocator::markEndOfOpList(int opListIndex) {
42 SkASSERT(!fAssigned); // We shouldn't be adding any opLists after (or during) assignment
43
44 SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
45 if (!fEndOfOpListOpIndices.empty()) {
46 SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
47 }
48
49 fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
50}
51
// By destruction time assign() must have fully drained the allocator: no
// pending intervals, no still-active intervals, and an empty proxy->interval
// hash. (The intervals themselves are owned by fIntervalAllocator.)
GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
57
// Records that 'proxy' is used by ops in the index range [start, end] so that
// assign() can later give it a backing GrSurface for exactly that span. If the
// proxy already has an interval, the existing interval is extended instead of
// adding a new one. 'isDirectDstRead' (debug-only) marks reads from the render
// target itself, which are expected to fall inside the existing interval.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        // Since we aren't going to add an interval we won't revisit this proxy in assign(). So it
        // must already be instantiated or it must be a lazy proxy that we will instantiate below.
        SkASSERT(proxy->isInstantiated() ||
                 GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState());
    } else {
        if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
            // Revise the interval for an existing use
#ifdef SK_DEBUG
            if (0 == start && 0 == end) {
                // This interval is for the initial upload to a deferred proxy. Due to the vagaries
                // of how deferred proxies are collected they can appear as uploads multiple times
                // in a single opLists' list and as uploads in several opLists.
                SkASSERT(0 == intvl->start());
            } else if (isDirectDstRead) {
                // Direct reads from the render target itself should occur w/in the existing
                // interval
                SkASSERT(intvl->start() <= start && intvl->end() >= end);
            } else {
                // Normal uses must come strictly after the interval's current end.
                SkASSERT(intvl->end() <= start && intvl->end() <= end);
            }
#endif
            intvl->extendEnd(end);
            return;
        }
        // No existing interval: take a recycled Interval from the free list if
        // one is available, otherwise carve a new one out of the arena.
        Interval* newIntvl;
        if (fFreeIntervalList) {
            newIntvl = fFreeIntervalList;
            fFreeIntervalList = newIntvl->next();
            newIntvl->setNext(nullptr);
            newIntvl->resetTo(proxy, start, end);
        } else {
            newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
        }

        fIntvlList.insertByIncreasingStart(newIntvl);
        fIntvlHash.add(newIntvl);
    }

    // Because readOnly proxies do not get a usage interval we must instantiate them here (since it
    // won't occur in GrResourceAllocator::assign)
    if (proxy->readOnly() || !fResourceProvider->explicitlyAllocateGPUResources()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            if (proxy->priv().doLazyInstantiation(fResourceProvider)) {
                if (proxy->priv().lazyInstantiationType() ==
                    GrSurfaceProxy::LazyInstantiationType::kDeinstantiate) {
                    // kDeinstantiate lazy proxies are tracked so they can be
                    // deinstantiated once the flush completes.
                    fDeinstantiateTracker->addProxy(proxy);
                }
            }
        }
    }
}
119
120GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400121 SkDEBUGCODE(this->validate());
122
Robert Phillips5af44de2017-07-18 14:49:38 -0400123 Interval* temp = fHead;
124 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500125 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400126 if (!fHead) {
127 fTail = nullptr;
128 }
129 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400130 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400131
132 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400133 return temp;
134}
135
136// TODO: fuse this with insertByIncreasingEnd
137void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400138 SkDEBUGCODE(this->validate());
139 SkASSERT(!intvl->next());
140
Robert Phillips5af44de2017-07-18 14:49:38 -0400141 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400142 // 14%
143 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500144 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400145 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500146 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400147 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400148 } else if (fTail->start() <= intvl->start()) {
149 // 83%
150 fTail->setNext(intvl);
151 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400152 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400153 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400154 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500155 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400156 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400157 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400158
159 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500160 intvl->setNext(next);
161 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400162 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400163
164 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400165}
166
167// TODO: fuse this with insertByIncreasingStart
168void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400169 SkDEBUGCODE(this->validate());
170 SkASSERT(!intvl->next());
171
Robert Phillips5af44de2017-07-18 14:49:38 -0400172 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400173 // 14%
174 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500175 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400176 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500177 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400178 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400179 } else if (fTail->end() <= intvl->end()) {
180 // 3%
181 fTail->setNext(intvl);
182 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400183 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400184 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400185 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500186 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400187 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400188 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400189
190 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500191 intvl->setNext(next);
192 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400193 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400194
195 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400196}
197
#ifdef SK_DEBUG
// Debug check: fHead/fTail are both set or both null, and fTail is really
// the last node reachable from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    for (Interval* cur = fHead; cur; cur = cur->next()) {
        last = cur;
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500209
210 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
211 Interval* tmp = fHead;
212 fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400213 fTail = nullptr;
Robert Phillips4150eea2018-02-07 17:08:21 -0500214 return tmp;
215}
216
Robert Phillips5af44de2017-07-18 14:49:38 -0400217// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400218void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400219 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
220
221 if (!key.isValid()) {
222 return; // can't do it w/o a valid scratch key
223 }
224
Robert Phillipsf8e25022017-11-08 15:24:31 -0500225 if (surface->getUniqueKey().isValid()) {
226 // If the surface has a unique key we throw it back into the resource cache.
227 // If things get really tight 'findSurfaceFor' may pull it back out but there is
228 // no need to have it in tight rotation.
229 return;
230 }
231
Robert Phillips715d08c2018-07-18 13:56:48 -0400232#if GR_ALLOCATION_SPEW
233 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
234#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400235 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500236 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400237}
238
// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a useable one, create a new one.
// 'needsStencil' requests that a stencil buffer be attached to whatever surface
// is returned; returns nullptr if that attachment (or creation) fails.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     bool needsStencil) {

    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                                                        proxy->asTextureProxy()->getUniqueKey());
        if (surface) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                           needsStencil)) {
                return nullptr;
            }

            return surface;
        }
    }

    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    // Only accept a pooled surface whose pending-IO state is compatible with
    // the proxy's requirements.
    auto filter = [&] (const GrSurface* s) {
        return !proxy->priv().requiresNoPendingIO() || !s->surfacePriv().hasPendingIO();
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            SkBudgeted::kNo == surface->resourcePriv().isBudgeted()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surface and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       needsStencil)) {
            return nullptr;
        }
        // Pooled surfaces never carry a unique key (recycleSurface filters those out).
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}
286
// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool.
// Expired Interval objects themselves are threaded onto fFreeIntervalList so
// addInterval() can reuse them.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next());

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

            // If the proxy has an actual live ref on it that means someone wants to retain its
            // contents. In that case we cannot recycle it (until the external holder lets
            // go of it).
            if (0 == temp->proxy()->priv().getProxyRefCnt()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}
311
// Walks the pending intervals (sorted by increasing start) and gives every
// proxy a backing GrSurface for its usage span, recycling surfaces from
// expired intervals where possible. May stop early on an opList boundary when
// the resource provider is over budget; '*startIndex'/'*stopIndex' bracket the
// opLists whose resources were allocated by this call. Returns false only when
// there is nothing to render; per-proxy instantiation failures are reported
// through 'outError' instead.
bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = AssignError::kNoError;

    fIntvlHash.reset(); // we don't need the interval hash anymore
    if (fIntvlList.empty()) {
        return false;          // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    if (!fResourceProvider->explicitlyAllocateGPUResources()) {
        // Legacy path: proxies instantiate themselves lazily elsewhere.
        fIntvlList.detachAll(); // arena allocator will clean these up for us
        return true;
    }

    SkDEBUGCODE(fAssigned = true;)

#if GR_ALLOCATION_SPEW
    this->dumpIntervals();
#endif
    while (Interval* cur = fIntvlList.popHead()) {
        // Advance the opList cursor when this interval starts past the
        // current opList's last op.
        if (fEndOfOpListOpIndices[fCurOpListIndex] < cur->start()) {
            fCurOpListIndex++;
        }

        // Retire intervals that end before this one starts, releasing their
        // surfaces into the free pool for reuse below.
        this->expire(cur->start());

        bool needsStencil = cur->proxy()->asRenderTargetProxy()
                                            ? cur->proxy()->asRenderTargetProxy()->needsStencil()
                                            : false;

        if (cur->proxy()->isInstantiated()) {
            // Already has a surface; just make sure the stencil requirement is met.
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                        fResourceProvider, cur->proxy()->peekSurface(), needsStencil)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate draws on opList boundaries
                if (!fIntvlList.empty() &&
                    fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
                    *stopIndex = fCurOpListIndex+1;

                    // This is interrupting the allocation of resources for this flush. We need to
                    // proactively clear the active interval list of any intervals that aren't
                    // guaranteed to survive the partial flush lest they become zombies (i.e.,
                    // holding a deleted surface proxy).
                    if (const Interval* tmp = fIntvlList.peekHead()) {
                        this->expire(tmp->start());
                    } else {
                        this->expire(std::numeric_limits<unsigned int>::max());
                    }
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            // Lazy proxies supply their own surface via a callback.
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            } else {
                if (GrSurfaceProxy::LazyInstantiationType::kDeinstantiate ==
                    cur->proxy()->priv().lazyInstantiationType()) {
                    // Track kDeinstantiate proxies so they can be deinstantiated post-flush.
                    fDeinstantiateTracker->addProxy(cur->proxy());
                }
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                // Propagate the proxy's unique key onto the (freshly pooled/created) surface.
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                 surface->uniqueID().asUInt(),
                 cur->proxy()->uniqueID().asUInt());
#endif

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate draws on opList boundaries
            if (!fIntvlList.empty() &&
                fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
                *stopIndex = fCurOpListIndex+1;

                // This is interrupting the allocation of resources for this flush. We need to
                // proactively clear the active interval list of any intervals that aren't
                // guaranteed to survive the partial flush lest they become zombies (i.e.,
                // holding a deleted surface proxy).
                if (const Interval* tmp = fIntvlList.peekHead()) {
                    this->expire(tmp->start());
                } else {
                    this->expire(std::numeric_limits<unsigned int>::max());
                }
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400434
#if GR_ALLOCATION_SPEW
// Debug aid: prints every pending interval, then an ASCII chart of their
// usage spans over the op index range.
void GrResourceAllocator::dumpIntervals() {

    // Print each interval while accumulating the overall op-index range
    unsigned int lowestOp = fNumOps+1;
    unsigned int highestOp = 0;
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d R:%d W:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->getBackingRefCnt_TestOnly(),
                 cur->proxy()->getPendingReadCnt_TestOnly(),
                 cur->proxy()->getPendingWriteCnt_TestOnly());
        lowestOp = SkTMin(lowestOp, cur->start());
        highestOp = SkTMax(highestOp, cur->end());
    }

    // Draw a graph of the usage intervals
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = lowestOp; i <= highestOp; ++i) {
            SkDebugf(i >= cur->start() && i <= cur->end() ? "x" : " ");
        }
        SkDebugf("\n");
    }
}
#endif