blob: 1dff77dc980889f541496606b1a455a1091bd8ff [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Adlai Hollerca1137b2021-04-08 11:39:55 -040010#include "src/gpu/GrDirectContextPriv.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040012#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040013#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040015#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050016#include "src/gpu/GrSurfaceProxyPriv.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040017
#ifdef SK_DEBUG
#include <atomic>

// Debug-only ID minting shared by Interval and Register: hands out sequential
// non-zero IDs (skipping SK_InvalidUniqueID) so spew/asserts can name objects.
static uint32_t next_debug_unique_id(std::atomic<uint32_t>* nextID) {
    uint32_t id = nextID->fetch_add(1, std::memory_order_relaxed);
    while (id == SK_InvalidUniqueID) {
        id = nextID->fetch_add(1, std::memory_order_relaxed);
    }
    return id;
}

uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
    static std::atomic<uint32_t> gIntervalNextID{1};
    return next_debug_unique_id(&gIntervalNextID);
}

uint32_t GrResourceAllocator::Register::CreateUniqueID() {
    static std::atomic<uint32_t> gRegisterNextID{1};
    return next_debug_unique_id(&gRegisterNextID);
}
#endif
39
GrResourceAllocator::~GrResourceAllocator() {
    // Unless an instantiation failed part-way through, planAssignment()/assign()
    // should have drained every interval list and the proxy->interval hash.
    SkASSERT(fFailedInstantiation || fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
45
// Records that 'proxy' needs a backing surface over the (inclusive) range
// [start, end] of opsTask indices. Repeat calls for the same proxy extend its
// existing interval rather than creating a new one. 'actualUse' distinguishes
// real uses (counted for recycling decisions) from bookkeeping-only mentions.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        auto resourceProvider = fDContext->priv().resourceProvider();
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(resourceProvider)) {
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        // Revise the interval for an existing use
        Interval* intvl = *intvlPtr;
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Normal case: later uses must not precede the interval's current end.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First time we've seen this proxy: allocate a fresh interval and index it.
    Interval* newIntvl = fInternalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
102
Adlai Holleree2837b2021-04-09 16:52:48 -0400103// Tragically we have cases where we always have to make new textures.
104static bool can_proxy_use_scratch(const GrCaps& caps, GrSurfaceProxy* proxy) {
105 return caps.reuseScratchTextures() || proxy->asRenderTargetProxy();
106}
107
Adlai Holler3cffe812021-04-09 13:43:32 -0400108GrResourceAllocator::Register::Register(GrSurfaceProxy* originatingProxy,
Adlai Holleree2837b2021-04-09 16:52:48 -0400109 GrScratchKey scratchKey,
110 GrResourceProvider* provider)
Adlai Holler3cffe812021-04-09 13:43:32 -0400111 : fOriginatingProxy(originatingProxy)
112 , fScratchKey(std::move(scratchKey)) {
113 SkASSERT(originatingProxy);
114 SkASSERT(!originatingProxy->isInstantiated());
115 SkASSERT(!originatingProxy->isLazy());
116 SkDEBUGCODE(fUniqueID = CreateUniqueID();)
Adlai Holleree2837b2021-04-09 16:52:48 -0400117 if (scratchKey.isValid()) {
118 if (can_proxy_use_scratch(*provider->caps(), originatingProxy)) {
119 fExistingSurface = provider->findAndRefScratchTexture(fScratchKey);
120 }
121 } else {
122 SkASSERT(this->uniqueKey().isValid());
123 fExistingSurface = provider->findByUniqueKey<GrSurface>(this->uniqueKey());
124 }
Adlai Holler3cffe812021-04-09 13:43:32 -0400125}
126
Adlai Holler7df8d222021-03-19 12:27:49 -0400127bool GrResourceAllocator::Register::isRecyclable(const GrCaps& caps,
128 GrSurfaceProxy* proxy,
129 int knownUseCount) const {
Adlai Holleree2837b2021-04-09 16:52:48 -0400130 if (!can_proxy_use_scratch(caps, proxy)) {
Adlai Holler7df8d222021-03-19 12:27:49 -0400131 return false;
132 }
133
134 if (!this->scratchKey().isValid()) {
135 return false; // no scratch key, no free pool
136 }
137 if (this->uniqueKey().isValid()) {
138 return false; // rely on the resource cache to hold onto uniquely-keyed surfaces.
139 }
140 // If all the refs on the proxy are known to the resource allocator then no one
Adlai Holler1143b1b2021-03-16 13:07:40 -0400141 // should be holding onto it outside of Ganesh.
Adlai Holler7df8d222021-03-19 12:27:49 -0400142 return !proxy->refCntGreaterThan(knownUseCount);
143}
144
// Backs 'proxy' with this register's surface: either the cached surface found
// at Register construction time, or one created/shared on demand here.
// Returns false if a surface could not be obtained.
bool GrResourceAllocator::Register::instantiateSurface(GrSurfaceProxy* proxy,
                                                       GrResourceProvider* resourceProvider) {
    SkASSERT(!proxy->peekSurface());

    sk_sp<GrSurface> newSurface;
    if (!fExistingSurface) {
        if (proxy == fOriginatingProxy) {
            // First proxy assigned to this register creates the actual surface.
            newSurface = proxy->priv().createSurface(resourceProvider);
        } else {
            // Later proxies share the surface the originating proxy created.
            newSurface = sk_ref_sp(fOriginatingProxy->peekSurface());
        }
    }
    if (!fExistingSurface && !newSurface) {
        return false;
    }

    GrSurface* surface = newSurface ? newSurface.get() : fExistingSurface.get();
    // Make surface budgeted if this proxy is budgeted.
    if (SkBudgeted::kYes == proxy->isBudgeted() &&
        GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
        // This gets the job done but isn't quite correct. It would be better to try to
        // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
        surface->resourcePriv().makeBudgeted();
    }

    // Propagate the proxy unique key to the surface if we have one.
    if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
        if (!surface->getUniqueKey().isValid()) {
            resourceProvider->assignUniqueKeyToResource(uniqueKey, surface);
        }
        SkASSERT(surface->getUniqueKey() == uniqueKey);
    }
    proxy->priv().assign(fExistingSurface ? fExistingSurface : std::move(newSurface));
    return true;
}
180
Robert Phillips5af44de2017-07-18 14:49:38 -0400181GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400182 SkDEBUGCODE(this->validate());
183
Robert Phillips5af44de2017-07-18 14:49:38 -0400184 Interval* temp = fHead;
185 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500186 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400187 if (!fHead) {
188 fTail = nullptr;
189 }
190 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400191 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400192
193 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400194 return temp;
195}
196
197// TODO: fuse this with insertByIncreasingEnd
198void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400199 SkDEBUGCODE(this->validate());
200 SkASSERT(!intvl->next());
201
Robert Phillips5af44de2017-07-18 14:49:38 -0400202 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400203 // 14%
204 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500205 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400206 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500207 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400208 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400209 } else if (fTail->start() <= intvl->start()) {
210 // 83%
211 fTail->setNext(intvl);
212 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400213 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400214 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400215 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500216 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400217 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400218 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400219
220 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500221 intvl->setNext(next);
222 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400223 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400224
225 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400226}
227
228// TODO: fuse this with insertByIncreasingStart
229void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400230 SkDEBUGCODE(this->validate());
231 SkASSERT(!intvl->next());
232
Robert Phillips5af44de2017-07-18 14:49:38 -0400233 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400234 // 14%
235 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500236 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400237 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500238 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400239 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400240 } else if (fTail->end() <= intvl->end()) {
241 // 3%
242 fTail->setNext(intvl);
243 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400244 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400245 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400246 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500247 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400248 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400249 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400250
251 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500252 intvl->setNext(next);
253 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400254 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400255
256 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400257}
258
#ifdef SK_DEBUG
// Debug-only structural check: head/tail are consistently null or non-null,
// and fTail is reachable as the last node from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    Interval* last = nullptr;
    Interval* node = fHead;
    while (node) {
        last = node;
        node = node->next();
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500270
Adlai Holler4cfbe532021-03-17 10:36:39 -0400271// First try to reuse one of the recently allocated/used registers in the free pool.
Adlai Holler7df8d222021-03-19 12:27:49 -0400272GrResourceAllocator::Register* GrResourceAllocator::findOrCreateRegisterFor(GrSurfaceProxy* proxy) {
Adlai Holleree2837b2021-04-09 16:52:48 -0400273 auto resourceProvider = fDContext->priv().resourceProvider();
Adlai Holler7df8d222021-03-19 12:27:49 -0400274 // Handle uniquely keyed proxies
Adlai Hollercc119d92021-03-16 15:17:25 -0400275 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
Adlai Holler7df8d222021-03-19 12:27:49 -0400276 if (auto p = fUniqueKeyRegisters.find(uniqueKey)) {
277 return *p;
Robert Phillips0790f8a2018-09-18 13:11:03 -0400278 }
Adlai Holler7df8d222021-03-19 12:27:49 -0400279 // No need for a scratch key. These don't go in the free pool.
Adlai Holleree2837b2021-04-09 16:52:48 -0400280 Register* r = fInternalAllocator.make<Register>(proxy, GrScratchKey(), resourceProvider);
Adlai Holler7df8d222021-03-19 12:27:49 -0400281 fUniqueKeyRegisters.set(uniqueKey, r);
282 return r;
Robert Phillips0790f8a2018-09-18 13:11:03 -0400283 }
284
Adlai Hollercc119d92021-03-16 15:17:25 -0400285 // Then look in the free pool
Adlai Holler7df8d222021-03-19 12:27:49 -0400286 GrScratchKey scratchKey;
Adlai Hollerca1137b2021-04-08 11:39:55 -0400287 proxy->priv().computeScratchKey(*fDContext->priv().caps(), &scratchKey);
Robert Phillips57aa3672017-07-21 11:38:13 -0400288
Adlai Holler4cfbe532021-03-17 10:36:39 -0400289 auto filter = [] (const Register* r) {
Robert Phillips10d17212019-04-24 14:09:10 -0400290 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500291 };
Adlai Holler7df8d222021-03-19 12:27:49 -0400292 if (Register* r = fFreePool.findAndRemove(scratchKey, filter)) {
Adlai Holler4cfbe532021-03-17 10:36:39 -0400293 return r;
Robert Phillips57aa3672017-07-21 11:38:13 -0400294 }
295
Adlai Holleree2837b2021-04-09 16:52:48 -0400296 return fInternalAllocator.make<Register>(proxy, std::move(scratchKey), resourceProvider);
Robert Phillips5af44de2017-07-18 14:49:38 -0400297}
298
Adlai Holler7df8d222021-03-19 12:27:49 -0400299// Remove any intervals that end before the current index. Add their registers
Robert Phillips39667382019-04-17 16:03:30 -0400300// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400301void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500302 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400303 Interval* intvl = fActiveIntvls.popHead();
304 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500305
Adlai Holler7df8d222021-03-19 12:27:49 -0400306 Register* r = intvl->getRegister();
Adlai Hollerca1137b2021-04-08 11:39:55 -0400307 if (r && r->isRecyclable(*fDContext->priv().caps(), intvl->proxy(), intvl->uses())) {
Adlai Holler7df8d222021-03-19 12:27:49 -0400308#if GR_ALLOCATION_SPEW
309 SkDebugf("putting register %d back into pool\n", r->uniqueID());
310#endif
311 // TODO: fix this insertion so we get a more LRU-ish behavior
312 fFreePool.insert(r->scratchKey(), r);
Robert Phillips5b65a842017-11-13 15:48:12 -0500313 }
Adlai Holler7df8d222021-03-19 12:27:49 -0400314 fFinishedIntvls.insertByIncreasingStart(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400315 }
316}
317
// Sweep the intervals in increasing-start order, pairing each proxy that needs
// one with a Register (a planned backing surface). Fully-lazy proxies are
// instantiated immediately. On return all intervals have been moved to
// fFinishedIntvls; returns false if any instantiation failed.
bool GrResourceAllocator::planAssignment() {
    fIntvlHash.reset();  // we don't need the interval hash anymore

    SkASSERT(!fPlanned && !fAssigned);
    SkDEBUGCODE(fPlanned = true;)

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    auto resourceProvider = fDContext->priv().resourceProvider();
    while (Interval* cur = fIntvlList.popHead()) {
        // Retire every interval that ends before this one starts, making their
        // registers available for reuse below.
        this->expire(cur->start());
        fActiveIntvls.insertByIncreasingEnd(cur);

        // Already-instantiated proxies and lazy proxies don't use registers.
        if (cur->proxy()->isInstantiated()) {
            continue;
        }

        // Instantiate fully-lazy proxies immediately. Ignore other lazy proxies at this stage.
        if (cur->proxy()->isLazy()) {
            if (cur->proxy()->isFullyLazy()) {
                fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(resourceProvider);
                if (fFailedInstantiation) {
                    break;
                }
            }
            continue;
        }

        Register* r = this->findOrCreateRegisterFor(cur->proxy());
#if GR_ALLOCATION_SPEW
        SkDebugf("Assigning register %d to %d\n",
                 r->uniqueID(),
                 cur->proxy()->uniqueID().asUInt());
#endif
        SkASSERT(!cur->proxy()->peekSurface());
        cur->setRegister(r);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return !fFailedInstantiation;
}
Adlai Holler7df8d222021-03-19 12:27:49 -0400364
// Sums the GPU memory that the planned (budgeted, not-yet-instantiated)
// surfaces will require and asks the resource cache to purge enough to make
// room. Must run after planAssignment() and before assign(). Returns false if
// the cache could not free enough budget.
bool GrResourceAllocator::makeBudgetHeadroom() {
    SkASSERT(fPlanned);
    SkASSERT(!fFailedInstantiation);
    size_t additionalBytesNeeded = 0;
    for (Interval* cur = fFinishedIntvls.peekHead(); cur; cur = cur->next()) {
        GrSurfaceProxy* proxy = cur->proxy();
        // Unbudgeted or already-backed proxies don't consume new budget.
        if (SkBudgeted::kNo == proxy->isBudgeted() || proxy->isInstantiated()) {
            continue;
        }

        // N.B Fully-lazy proxies were already instantiated in planAssignment
        if (proxy->isLazy()) {
            additionalBytesNeeded += proxy->gpuMemorySize();
        } else {
            Register* r = cur->getRegister();
            SkASSERT(r);
            // Count each register once; registers with an existing surface
            // reuse memory the cache already accounts for.
            if (!r->accountedForInBudget() && !r->existingSurface()) {
                additionalBytesNeeded += proxy->gpuMemorySize();
            }
            r->setAccountedForInBudget();
        }
    }
    return fDContext->priv().getResourceCache()->purgeToMakeHeadroom(additionalBytesNeeded);
}
389
Adlai Holleree2837b2021-04-09 16:52:48 -0400390bool GrResourceAllocator::assign() {
391 SkASSERT(fPlanned && !fAssigned);
392 SkDEBUGCODE(fAssigned = true;)
Adlai Hollerca1137b2021-04-08 11:39:55 -0400393 auto resourceProvider = fDContext->priv().resourceProvider();
Adlai Holler7df8d222021-03-19 12:27:49 -0400394 while (Interval* cur = fFinishedIntvls.popHead()) {
395 if (fFailedInstantiation) {
396 break;
397 }
398 if (cur->proxy()->isInstantiated()) {
399 continue;
400 }
401 if (cur->proxy()->isLazy()) {
Adlai Hollerca1137b2021-04-08 11:39:55 -0400402 fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(resourceProvider);
Adlai Holler7df8d222021-03-19 12:27:49 -0400403 continue;
404 }
405 Register* r = cur->getRegister();
406 SkASSERT(r);
Adlai Hollerca1137b2021-04-08 11:39:55 -0400407 fFailedInstantiation = !r->instantiateSurface(cur->proxy(), resourceProvider);
Adlai Holler7df8d222021-03-19 12:27:49 -0400408 }
Adlai Holler19fd5142021-03-08 10:19:30 -0700409 return !fFailedInstantiation;
Robert Phillips5af44de2017-07-18 14:49:38 -0400410}
Robert Phillips715d08c2018-07-18 13:56:48 -0400411
#if GR_ALLOCATION_SPEW
// Debug spew: prints every pending interval (proxy ID, backing surface ID if
// instantiated, range, ref counts) followed by an ASCII lifetime chart.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals: one row per interval, an 'x' for
    // each index at which the interval is live.
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif