/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/GrResourceAllocator.h"

#include "src/gpu/GrDeinstantiateProxyTracker.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrOpList.h"
#include "src/gpu/GrRenderTargetProxy.h"
#include "src/gpu/GrResourceCache.h"
#include "src/gpu/GrResourceProvider.h"
#include "src/gpu/GrSurfacePriv.h"
#include "src/gpu/GrSurfaceProxy.h"
#include "src/gpu/GrSurfaceProxyPriv.h"
#include "src/gpu/GrTextureProxy.h"

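// GrResourceAllocator performs interval-based assignment of GrSurfaces to GrSurfaceProxies:
// each proxy gets a usage interval covering the ops that touch it, intervals are kept sorted
// by increasing start, and assign() walks them in order, recycling surfaces from a
// scratch-keyed free pool as earlier intervals expire.
//
// A rough sketch of how a caller drives the allocator (illustrative only; in practice the
// drawing manager asks each opList to gather its own proxy intervals, and the exact calls
// may differ):
//
//     for (int i = 0; i < numOpLists; ++i) {
//         // record an interval for every proxy opList 'i' reads or writes
//         alloc.addInterval(proxy, firstOpIndex, lastOpIndex, GrResourceAllocator::ActualUse::kYes);
//         alloc.markEndOfOpList(i);
//     }
//     alloc.determineRecyclability();
//
//     int startIndex, stopIndex;
//     auto error = GrResourceAllocator::AssignError::kNoError;
//     while (alloc.assign(&startIndex, &stopIndex, &error)) {
//         // execute the opLists in [startIndex, stopIndex) here
//     }
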
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        uint32_t id;
        do {
            id = nextID++;
        } while (id == SK_InvalidUniqueID);
        return id;
    }
#endif

void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);
    fAssignedSurface = s;
    fProxy->priv().assign(std::move(s));
}

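// Mark as recyclable any interval whose proxy is referenced only by the allocator itself
// (i.e., every outstanding proxy ref is accounted for as a use). Such a proxy's surface can
// be returned to the free pool as soon as its interval expires, since nothing outside of
// Ganesh can still be holding onto it.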
void GrResourceAllocator::determineRecyclability() {
    for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        if (cur->proxy()->canSkipResourceAllocator()) {
            // These types of proxies can slip in here if they require a stencil buffer
            continue;
        }

        if (cur->uses() >= cur->proxy()->priv().getProxyRefCnt()) {
            // All the refs on the proxy are known to the resource allocator thus no one
            // should be holding onto it outside of Ganesh.
            SkASSERT(cur->uses() == cur->proxy()->priv().getProxyRefCnt());
            cur->markAsRecyclable();
        }
    }
}

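// Record the op index at which 'opListIndex' ends. assign() later consults these indices to
// tell when it has crossed an opList boundary (e.g., to decide where an intermediate flush
// may be forced when the resource provider is over budget).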
void GrResourceAllocator::markEndOfOpList(int opListIndex) {
    SkASSERT(!fAssigned);      // We shouldn't be adding any opLists after (or during) assignment

    SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
    if (!fEndOfOpListOpIndices.empty()) {
        SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
    }

    fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
    SkASSERT(fEndOfOpListOpIndices.count() <= fNumOpLists);
}

GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}

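// Add (or extend) the usage interval for 'proxy' to cover the ops [start, end]. Proxies that
// can skip the allocator or are read-only never get an interval; everything else either
// extends an existing interval found via the proxy-ID hash or gets a fresh Interval, reusing
// a node from the free interval list when one is available.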
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {

    bool needsStencil = proxy->asRenderTargetProxy()
                                ? proxy->asRenderTargetProxy()->needsStencil()
                                : false;

    if (proxy->canSkipResourceAllocator()) {
        if (needsStencil && proxy->isInstantiated()) {
            // If the proxy is still not instantiated at this point but will need stencil, it will
            // attach its own stencil buffer upon onFlush instantiation.
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                    fResourceProvider, proxy->peekSurface(), true /*needsStencil*/)) {
                SkDebugf("WARNING: failed to attach stencil buffer. Rendering may be incorrect.\n");
            }
        }
        return;
    }

    SkASSERT(!proxy->priv().ignoredByResourceAllocator());

    SkASSERT(start <= end);
    SkASSERT(!fAssigned);      // We shouldn't be adding any intervals after (or during) assignment

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        // Since we aren't going to add an interval we won't revisit this proxy in assign(). So it
        // must already be instantiated or it must be a lazy proxy that we will instantiate below.
        SkASSERT(proxy->isInstantiated() ||
                 GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState());
    } else {
        if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
            // Revise the interval for an existing use
#ifdef SK_DEBUG
            if (0 == start && 0 == end) {
                // This interval is for the initial upload to a deferred proxy. Due to the vagaries
                // of how deferred proxies are collected they can appear as uploads multiple times
                // in a single opList's list and as uploads in several opLists.
                SkASSERT(0 == intvl->start());
            } else if (isDirectDstRead) {
                // Direct reads from the render target itself should occur w/in the existing
                // interval
                SkASSERT(intvl->start() <= start && intvl->end() >= end);
            } else {
                SkASSERT(intvl->end() <= start && intvl->end() <= end);
            }
#endif
            if (ActualUse::kYes == actualUse) {
                intvl->addUse();
            }
            intvl->extendEnd(end);
            return;
        }
        Interval* newIntvl;
        if (fFreeIntervalList) {
            newIntvl = fFreeIntervalList;
            fFreeIntervalList = newIntvl->next();
            newIntvl->setNext(nullptr);
            newIntvl->resetTo(proxy, start, end);
        } else {
            newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
        }

        if (ActualUse::kYes == actualUse) {
            newIntvl->addUse();
        }
        fIntvlList.insertByIncreasingStart(newIntvl);
        fIntvlHash.add(newIntvl);
    }

    // Because readOnly proxies do not get a usage interval we must instantiate them here (since it
    // won't occur in GrResourceAllocator::assign)
    if (proxy->readOnly()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            if (proxy->priv().doLazyInstantiation(fResourceProvider)) {
                if (proxy->priv().lazyInstantiationType() ==
                    GrSurfaceProxy::LazyInstantiationType::kDeinstantiate) {
                    fDeinstantiateTracker->addProxy(proxy);
                }
            }
        }
    }
}

GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
    SkDEBUGCODE(this->validate());

    Interval* temp = fHead;
    if (temp) {
        fHead = temp->next();
        if (!fHead) {
            fTail = nullptr;
        }
        temp->setNext(nullptr);
    }

    SkDEBUGCODE(this->validate());
    return temp;
}

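// The percentage comments in the two insert methods below appear to record how often each
// branch is taken in practice; the common cases (append at the tail for starts, prepend at
// the head for ends) keep the sorted insertion cheap.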
// TODO: fuse this with insertByIncreasingEnd
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->start() <= fHead->start()) {
        // 3%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->start() <= intvl->start()) {
        // 83%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // almost never
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->start() > next->start(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

// TODO: fuse this with insertByIncreasingStart
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}

#ifdef SK_DEBUG
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* prev = nullptr;
    for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) {
    }

    SkASSERT(fTail == prev);
}
#endif

GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
    Interval* tmp = fHead;
    fHead = nullptr;
    fTail = nullptr;
    return tmp;
}

// 'surface' can be reused. Add it back to the free pool.
void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
    const GrScratchKey &key = surface->resourcePriv().getScratchKey();

    if (!key.isValid()) {
        return; // can't do it w/o a valid scratch key
    }

    if (surface->getUniqueKey().isValid()) {
        // If the surface has a unique key we throw it back into the resource cache.
        // If things get really tight 'findSurfaceFor' may pull it back out but there is
        // no need to have it in tight rotation.
        return;
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
#endif
    // TODO: fix this insertion so we get a more LRU-ish behavior
    fFreePool.insert(key, surface.release());
}

// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a usable one, create a new one.
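// The lookup order is: a cached surface matching the proxy's unique key, then a scratch-keyed
// surface from the free pool, and finally a brand new surface from the resource provider.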
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     bool needsStencil) {

    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                                                        proxy->asTextureProxy()->getUniqueKey());
        if (surface) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                           needsStencil)) {
                return nullptr;
            }

            return surface;
        }
    }

    // Next, look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    auto filter = [] (const GrSurface* s) {
        return true;
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       needsStencil)) {
            return nullptr;
        }
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}

// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool if possible.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next());

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

            if (temp->isRecyclable()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}

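// Return true if the next interval to be assigned starts at or after the end of the opList
// currently being processed, i.e., an intermediate flush here would fall cleanly on an opList
// boundary.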
bool GrResourceAllocator::onOpListBoundary() const {
    if (fIntvlList.empty()) {
        SkASSERT(fCurOpListIndex+1 <= fNumOpLists);
        // Although technically on an opList boundary there is no need to force an
        // intermediate flush here
        return false;
    }

    const Interval* tmp = fIntvlList.peekHead();
    return fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start();
}

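// Cut the current assignment short at the next opList boundary and report, via 'stopIndex',
// how far the caller should flush.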
void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
    *stopIndex = fCurOpListIndex+1;

    // This is interrupting the allocation of resources for this flush. We need to
    // proactively clear the active interval list of any intervals that aren't
    // guaranteed to survive the partial flush lest they become zombies (i.e.,
    // holding a deleted surface proxy).
    const Interval* tmp = fIntvlList.peekHead();
    SkASSERT(fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start());

    fCurOpListIndex++;
    SkASSERT(fCurOpListIndex < fNumOpLists);

    this->expire(tmp->start());
}

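// Walk the interval list in increasing-start order, assigning a GrSurface to each proxy and
// reporting back (via 'startIndex'/'stopIndex') which range of opLists is now safe to execute.
// Expired intervals return their surfaces to the free pool so later, non-overlapping proxies
// can reuse the same backing store. Returns false when there is nothing left to render; may
// stop early at an opList boundary when the resource provider is over budget.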
bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = AssignError::kNoError;

    SkASSERT(fNumOpLists == fEndOfOpListOpIndices.count());

    fIntvlHash.reset(); // we don't need the interval hash anymore

    if (fCurOpListIndex >= fEndOfOpListOpIndices.count()) {
        return false; // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    if (fIntvlList.empty()) {
        fCurOpListIndex = fEndOfOpListOpIndices.count();
        return true;          // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning opLists %d through %d out of %d numOpLists\n",
             *startIndex, *stopIndex, fNumOpLists);
    SkDebugf("EndOfOpListIndices: ");
    for (int i = 0; i < fEndOfOpListOpIndices.count(); ++i) {
        SkDebugf("%d ", fEndOfOpListOpIndices[i]);
    }
    SkDebugf("\n");
#endif

    SkDEBUGCODE(fAssigned = true;)

#if GR_ALLOCATION_SPEW
    this->dumpIntervals();
#endif
    while (Interval* cur = fIntvlList.popHead()) {
        if (fEndOfOpListOpIndices[fCurOpListIndex] <= cur->start()) {
            fCurOpListIndex++;
            SkASSERT(fCurOpListIndex < fNumOpLists);
        }

        this->expire(cur->start());

        bool needsStencil = cur->proxy()->asRenderTargetProxy()
                                    ? cur->proxy()->asRenderTargetProxy()->needsStencil()
                                    : false;

        if (cur->proxy()->isInstantiated()) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                        fResourceProvider, cur->proxy()->peekSurface(), needsStencil)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate draws on opList boundaries
                if (this->onOpListBoundary()) {
                    this->forceIntermediateFlush(stopIndex);
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            } else {
                if (GrSurfaceProxy::LazyInstantiationType::kDeinstantiate ==
                    cur->proxy()->priv().lazyInstantiationType()) {
                    fDeinstantiateTracker->addProxy(cur->proxy());
                }
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate draws on opList boundaries
            if (this->onOpListBoundary()) {
                this->forceIntermediateFlush(stopIndex);
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}

#if GR_ALLOCATION_SPEW
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d R:%d W:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->getBackingRefCnt_TestOnly(),
                 cur->proxy()->getPendingReadCnt_TestOnly(),
                 cur->proxy()->getPendingWriteCnt_TestOnly());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif