blob: 67480ecf961502186b5cd39cf2857e257d00c4d7 [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040011#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrResourceProvider.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040014#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050015#include "src/gpu/GrSurfaceProxyPriv.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040016
Adlai Holler4cfbe532021-03-17 10:36:39 -040017#ifdef SK_DEBUG
18#include <atomic>
Mike Klein0ec1c572018-12-04 11:52:51 -050019
Adlai Holler4cfbe532021-03-17 10:36:39 -040020uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
21 static std::atomic<uint32_t> nextID{1};
22 uint32_t id;
23 do {
24 id = nextID.fetch_add(1, std::memory_order_relaxed);
25 } while (id == SK_InvalidUniqueID);
26 return id;
27}
28
29uint32_t GrResourceAllocator::Register::CreateUniqueID() {
30 static std::atomic<uint32_t> nextID{1};
31 uint32_t id;
32 do {
33 id = nextID.fetch_add(1, std::memory_order_relaxed);
34 } while (id == SK_InvalidUniqueID);
35 return id;
36}
Robert Phillipsda1be462018-07-27 07:18:06 -040037#endif
38
Robert Phillips5b65a842017-11-13 15:48:12 -050039GrResourceAllocator::~GrResourceAllocator() {
Robert Phillips5b65a842017-11-13 15:48:12 -050040 SkASSERT(fIntvlList.empty());
41 SkASSERT(fActiveIntvls.empty());
42 SkASSERT(!fIntvlHash.count());
Robert Phillips5b65a842017-11-13 15:48:12 -050043}
44
// Records that 'proxy' is used over the op-index range [start, end]. If the proxy
// already has an interval it is revised/extended rather than duplicated.
// 'actualUse' distinguishes real uses (which count toward the proxy's use tally)
// from bookkeeping-only entries. 'isDirectDstRead' (debug builds only) marks
// reads from the render target itself, which must fall within the existing
// interval.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment

    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Record the failure; assign() reports it to the caller.
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // it must already be instantiated or it must be a lazy proxy that we instantiated
            // above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        // Revise the interval for an existing use
        Interval* intvl = *intvlPtr;
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Normal case: intervals for a proxy are added in increasing op order,
            // so the new range must begin at or after the existing interval's end.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First use of this proxy: create a fresh interval in the arena allocator.
    Interval* newIntvl = fInternalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
100
Adlai Holler9f358822021-03-18 20:41:08 +0000101bool GrResourceAllocator::Interval::isSurfaceRecyclable() const {
102 // All the refs on the proxy are known to the resource allocator thus no one
Adlai Holler1143b1b2021-03-16 13:07:40 -0400103 // should be holding onto it outside of Ganesh.
Adlai Holler9f358822021-03-18 20:41:08 +0000104 return !fProxy->refCntGreaterThan(fUses);
Adlai Holler1143b1b2021-03-16 13:07:40 -0400105}
106
Robert Phillips5af44de2017-07-18 14:49:38 -0400107GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400108 SkDEBUGCODE(this->validate());
109
Robert Phillips5af44de2017-07-18 14:49:38 -0400110 Interval* temp = fHead;
111 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500112 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400113 if (!fHead) {
114 fTail = nullptr;
115 }
116 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400117 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400118
119 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400120 return temp;
121}
122
123// TODO: fuse this with insertByIncreasingEnd
124void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400125 SkDEBUGCODE(this->validate());
126 SkASSERT(!intvl->next());
127
Robert Phillips5af44de2017-07-18 14:49:38 -0400128 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400129 // 14%
130 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500131 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400132 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500133 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400134 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400135 } else if (fTail->start() <= intvl->start()) {
136 // 83%
137 fTail->setNext(intvl);
138 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400139 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400140 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400141 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500142 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400143 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400144 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400145
146 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500147 intvl->setNext(next);
148 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400149 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400150
151 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400152}
153
154// TODO: fuse this with insertByIncreasingStart
155void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400156 SkDEBUGCODE(this->validate());
157 SkASSERT(!intvl->next());
158
Robert Phillips5af44de2017-07-18 14:49:38 -0400159 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400160 // 14%
161 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500162 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400163 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500164 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400165 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400166 } else if (fTail->end() <= intvl->end()) {
167 // 3%
168 fTail->setNext(intvl);
169 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400170 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400171 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400172 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500173 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400174 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400175 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400176
177 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500178 intvl->setNext(next);
179 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400180 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400181
182 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400183}
184
#ifdef SK_DEBUG
// Debug-only consistency check: head/tail agree on emptiness and the
// recorded tail is actually the last reachable node.
void GrResourceAllocator::IntervalList::validate() const {
    // Head and tail are either both null (empty list) or both non-null.
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    // Walk the whole list; the final node visited must match fTail.
    Interval* last = nullptr;
    Interval* node = fHead;
    while (node) {
        last = node;
        node = node->next();
    }

    SkASSERT(fTail == last);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500196
Adlai Holler9f358822021-03-18 20:41:08 +0000197// 'surface' can be reused. Add it back to the free pool.
198void GrResourceAllocator::recycleRegister(Register* r) {
199 const GrScratchKey &key = r->scratchKey();
200
201 if (!key.isValid()) {
202 return; // can't do it w/o a valid scratch key
203 }
204
205 GrSurface* surface = r->surface();
206 if (surface->getUniqueKey().isValid()) {
207 // If the surface has a unique key we throw it back into the resource cache.
208 // If things get really tight 'findRegisterFor' may pull it back out but there is
209 // no need to have it in tight rotation.
210 return;
211 }
212
213#if GR_ALLOCATION_SPEW
214 SkDebugf("putting register %d back into pool\n", r->uniqueID());
215#endif
216 // TODO: fix this insertion so we get a more LRU-ish behavior
217 fFreePool.insert(key, r);
218}
219
// Finds or creates a Register (i.e., a backing GrSurface) for 'proxy'.
// Lookup order:
//   1. a cached surface matching the proxy's unique key,
//   2. a compatible register from the free pool (scratch-key match),
//   3. a freshly instantiated surface.
// Returns nullptr if the surface could not be created (e.g., out of memory).
GrResourceAllocator::Register* GrResourceAllocator::findRegisterFor(const GrSurfaceProxy* proxy) {
    if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
        // First try to reattach to a cached surface if the proxy is uniquely keyed
        if (sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(uniqueKey)) {
            // TODO: Find the register if we've encountered this unique key before.
            return fInternalAllocator.make<Register>(std::move(surface));
        }
    }

    // Then look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(*fResourceProvider->caps(), &key);

    // Accept any register with a matching scratch key.
    auto filter = [] (const Register* r) {
        return true;
    };
    if (Register* r = fFreePool.findAndRemove(key, filter)) {
        GrSurface* surface = r->surface();
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }
        // recycleRegister() never pools uniquely keyed surfaces, so none should appear here.
        SkASSERT(!surface->getUniqueKey().isValid());
        return r;
    }

    // Finally, instantiate a brand-new surface and wrap it in a new register.
    if (sk_sp<GrSurface> surf = proxy->priv().createSurface(fResourceProvider)) {
        return fInternalAllocator.make<Register>(std::move(surf));
    }
    return nullptr;
}
256
Adlai Holler9f358822021-03-18 20:41:08 +0000257// Remove any intervals that end before the current index. Return their GrSurfaces
Robert Phillips39667382019-04-17 16:03:30 -0400258// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400259void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500260 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Adlai Holler729ba5e2021-03-15 12:34:31 -0400261 Interval* intvl = fActiveIntvls.popHead();
262 SkASSERT(!intvl->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500263
Adlai Holler9f358822021-03-18 20:41:08 +0000264 if (Register* r = intvl->getRegister()) {
265 if (intvl->isSurfaceRecyclable()) {
266 this->recycleRegister(r);
267 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500268 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400269 }
270}
271
// Walks the intervals in increasing-start order, assigning a backing surface
// to each proxy as its interval begins and recycling surfaces as intervals
// expire. Returns false if any instantiation failed (including failures
// recorded earlier by addInterval()).
bool GrResourceAllocator::assign() {
    fIntvlHash.reset(); // we don't need the interval hash anymore

    SkDEBUGCODE(fAssigned = true;)

    if (fIntvlList.empty()) {
        return !fFailedInstantiation; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    while (Interval* cur = fIntvlList.popHead()) {
        // Retire intervals that ended before this one starts, freeing their
        // surfaces for reuse below.
        this->expire(cur->start());

        if (cur->proxy()->isInstantiated()) {
            // Already has a surface (e.g. wrapped or previously assigned).
            fActiveIntvls.insertByIncreasingEnd(cur);

            continue;
        }

        if (cur->proxy()->isLazy()) {
            // Lazy proxies create their own surface via their callback.
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                fFailedInstantiation = true;
            }
        } else if (Register* r = this->findRegisterFor(cur->proxy())) {
            sk_sp<GrSurface> surface = r->refSurface();

            // propagate the proxy unique key to the surface if we have one.
            if (const auto& uniqueKey = cur->proxy()->getUniqueKey(); uniqueKey.isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(uniqueKey, surface.get());
                }
                SkASSERT(surface->getUniqueKey() == uniqueKey);
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            SkASSERT(!cur->proxy()->peekSurface());
            cur->setRegister(r);
            // TODO: surface creation and assignment should happen later
            cur->proxy()->priv().assign(std::move(surface));
        } else {
            // No surface could be found or created for this proxy.
            SkASSERT(!cur->proxy()->isInstantiated());
            fFailedInstantiation = true;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return !fFailedInstantiation;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400332
#if GR_ALLOCATION_SPEW
// Debug aid: prints every interval (proxy ID, backing ID, range, ref counts)
// followed by an ASCII chart of interval occupancy across the op-index range.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals: one row per interval, an 'x' for
    // each op index the interval covers.
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif
366#endif