Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 8 | #include "src/gpu/GrResourceAllocator.h" |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 9 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 10 | #include "src/gpu/GrGpuResourcePriv.h" |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 11 | #include "src/gpu/GrOpsTask.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 12 | #include "src/gpu/GrRenderTargetProxy.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 13 | #include "src/gpu/GrResourceProvider.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 14 | #include "src/gpu/GrSurfaceProxy.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 15 | #include "src/gpu/GrSurfaceProxyPriv.h" |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 16 | |
Adlai Holler | 4cfbe53 | 2021-03-17 10:36:39 -0400 | [diff] [blame] | 17 | #ifdef SK_DEBUG |
| 18 | #include <atomic> |
Mike Klein | 0ec1c57 | 2018-12-04 11:52:51 -0500 | [diff] [blame] | 19 | |
Adlai Holler | 4cfbe53 | 2021-03-17 10:36:39 -0400 | [diff] [blame] | 20 | uint32_t GrResourceAllocator::Interval::CreateUniqueID() { |
| 21 | static std::atomic<uint32_t> nextID{1}; |
| 22 | uint32_t id; |
| 23 | do { |
| 24 | id = nextID.fetch_add(1, std::memory_order_relaxed); |
| 25 | } while (id == SK_InvalidUniqueID); |
| 26 | return id; |
| 27 | } |
| 28 | |
| 29 | uint32_t GrResourceAllocator::Register::CreateUniqueID() { |
| 30 | static std::atomic<uint32_t> nextID{1}; |
| 31 | uint32_t id; |
| 32 | do { |
| 33 | id = nextID.fetch_add(1, std::memory_order_relaxed); |
| 34 | } while (id == SK_InvalidUniqueID); |
| 35 | return id; |
| 36 | } |
Robert Phillips | da1be46 | 2018-07-27 07:18:06 -0400 | [diff] [blame] | 37 | #endif |
| 38 | |
// By destruction time all bookkeeping must have been drained (normally by
// assign()): no pending intervals, no active intervals, and an empty
// proxy-ID -> interval hash.
GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
| 44 | |
// Records that 'proxy' is used over the op range [start, end]. If the proxy
// already has an interval, that interval is extended; otherwise a new one is
// created and inserted (sorted by increasing start) and registered in the
// proxy-ID hash. Read-only and skip-able proxies never get intervals.
// 'actualUse' distinguishes real uses (counted on the interval) from
// bookkeeping-only mentions. 'isDirectDstRead' (debug builds only) relaxes
// the ordering assertion for reads from the render target itself.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Failure is latched; assign() reports it to the caller.
            fFailedInstantiation = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    uint32_t proxyID = proxy->uniqueID().asUInt();
    if (Interval** intvlPtr = fIntvlHash.find(proxyID)) {
        // Revise the interval for an existing use
        Interval* intvl = *intvlPtr;
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            // Normal case: new uses arrive in non-decreasing op order.
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First use of this proxy: allocate a fresh interval from the arena.
    Interval* newIntvl = fInternalAllocator.make<Interval>(proxy, start, end);

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.set(proxyID, newIntvl);
}
| 100 | |
Adlai Holler | 7df8d22 | 2021-03-19 12:27:49 -0400 | [diff] [blame^] | 101 | bool GrResourceAllocator::Register::isRecyclable(const GrCaps& caps, |
| 102 | GrSurfaceProxy* proxy, |
| 103 | int knownUseCount) const { |
| 104 | if (!caps.reuseScratchTextures() && !proxy->asRenderTargetProxy()) { |
| 105 | // Tragically, scratch texture reuse is totally disabled in this case. |
| 106 | return false; |
| 107 | } |
| 108 | |
| 109 | if (!this->scratchKey().isValid()) { |
| 110 | return false; // no scratch key, no free pool |
| 111 | } |
| 112 | if (this->uniqueKey().isValid()) { |
| 113 | return false; // rely on the resource cache to hold onto uniquely-keyed surfaces. |
| 114 | } |
| 115 | // If all the refs on the proxy are known to the resource allocator then no one |
Adlai Holler | 1143b1b | 2021-03-16 13:07:40 -0400 | [diff] [blame] | 116 | // should be holding onto it outside of Ganesh. |
Adlai Holler | 7df8d22 | 2021-03-19 12:27:49 -0400 | [diff] [blame^] | 117 | return !proxy->refCntGreaterThan(knownUseCount); |
| 118 | } |
| 119 | |
// Backs 'proxy' with a GrSurface for this register. Resolution order:
// (1) for uniquely-keyed proxies, try to reattach a cached surface by key;
// (2) if this proxy originated the register, create a new surface;
// (3) otherwise share the surface already created for the originating proxy.
// Also reconciles budgeting and propagates the unique key onto the surface.
// Returns false if no surface could be obtained.
bool GrResourceAllocator::Register::instantiateSurface(GrSurfaceProxy* proxy,
                                                       GrResourceProvider* resourceProvider) {
    SkASSERT(!proxy->peekSurface());

    sk_sp<GrSurface> surface;
    if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
        // Uniquely-keyed registers are never shared, so the keys must agree.
        SkASSERT(uniqueKey == fOriginatingProxy->getUniqueKey());
        // First try to reattach to a cached surface if the proxy is uniquely keyed
        surface = resourceProvider->findByUniqueKey<GrSurface>(uniqueKey);
    }
    if (!surface) {
        if (proxy == fOriginatingProxy) {
            surface = proxy->priv().createSurface(resourceProvider);
        } else {
            // Share the surface that was created for this register's first proxy.
            surface = sk_ref_sp(fOriginatingProxy->peekSurface());
        }
    }
    if (!surface) {
        return false;
    }

    // Make surface budgeted if this proxy is budgeted.
    if (SkBudgeted::kYes == proxy->isBudgeted() &&
        GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
        // This gets the job done but isn't quite correct. It would be better to try to
        // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
        surface->resourcePriv().makeBudgeted();
    }

    // Propagate the proxy unique key to the surface if we have one.
    if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
        if (!surface->getUniqueKey().isValid()) {
            resourceProvider->assignUniqueKeyToResource(uniqueKey, surface.get());
        }
        SkASSERT(surface->getUniqueKey() == uniqueKey);
    }
    proxy->priv().assign(std::move(surface));
    return true;
}
| 159 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 160 | GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 161 | SkDEBUGCODE(this->validate()); |
| 162 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 163 | Interval* temp = fHead; |
| 164 | if (temp) { |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 165 | fHead = temp->next(); |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 166 | if (!fHead) { |
| 167 | fTail = nullptr; |
| 168 | } |
| 169 | temp->setNext(nullptr); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 170 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 171 | |
| 172 | SkDEBUGCODE(this->validate()); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 173 | return temp; |
| 174 | } |
| 175 | |
// TODO: fuse this with insertByIncreasingEnd
// Inserts 'intvl' so the list stays sorted by increasing start(). The branch
// percentages below are observed hit rates; the head/tail fast paths cover
// nearly all insertions, so the linear scan is rarely taken.
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());   // must not already be linked into a list

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->start() <= fHead->start()) {
        // 3%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->start() <= intvl->start()) {
        // 83%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // almost never
        // Linear scan for the first node with a strictly larger start; the
        // tail check above guarantees the loop terminates before running off
        // the end.
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->start() > next->start(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}
| 206 | |
// TODO: fuse this with insertByIncreasingStart
// Inserts 'intvl' so the list stays sorted by increasing end(). Used for the
// active-interval list, which expire() drains from the head. The branch
// percentages are observed hit rates.
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    SkDEBUGCODE(this->validate());
    SkASSERT(!intvl->next());   // must not already be linked into a list

    if (!fHead) {
        // 14%
        fHead = fTail = intvl;
    } else if (intvl->end() <= fHead->end()) {
        // 64%
        intvl->setNext(fHead);
        fHead = intvl;
    } else if (fTail->end() <= intvl->end()) {
        // 3%
        fTail->setNext(intvl);
        fTail = intvl;
    } else {
        // 19% but 81% of those land right after the list's head
        // Linear scan for the first node with a strictly larger end; the tail
        // check above guarantees termination.
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; intvl->end() > next->end(); prev = next, next = next->next()) {
        }

        SkASSERT(next);
        intvl->setNext(next);
        prev->setNext(intvl);
    }

    SkDEBUGCODE(this->validate());
}
| 237 | |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 238 | #ifdef SK_DEBUG |
| 239 | void GrResourceAllocator::IntervalList::validate() const { |
| 240 | SkASSERT(SkToBool(fHead) == SkToBool(fTail)); |
| 241 | |
| 242 | Interval* prev = nullptr; |
| 243 | for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) { |
| 244 | } |
| 245 | |
| 246 | SkASSERT(fTail == prev); |
| 247 | } |
| 248 | #endif |
Robert Phillips | 4150eea | 2018-02-07 17:08:21 -0500 | [diff] [blame] | 249 | |
// First try to reuse one of the recently allocated/used registers in the free pool.
// Lookup order: (1) uniquely-keyed proxies get a dedicated per-key register
// (created on first sight, cached in fUniqueKeyRegisters, never pooled);
// (2) otherwise try the scratch-key free pool; (3) otherwise allocate a new
// register from the arena.
GrResourceAllocator::Register* GrResourceAllocator::findOrCreateRegisterFor(GrSurfaceProxy* proxy) {
    // Handle uniquely keyed proxies
    if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
        if (auto p = fUniqueKeyRegisters.find(uniqueKey)) {
            return *p;
        }
        // No need for a scratch key. These don't go in the free pool.
        Register* r = fInternalAllocator.make<Register>(proxy, GrScratchKey());
        fUniqueKeyRegisters.set(uniqueKey, r);
        return r;
    }

    // Then look in the free pool
    GrScratchKey scratchKey;
    proxy->priv().computeScratchKey(*fResourceProvider->caps(), &scratchKey);

    // Any register under the key is acceptable; the filter intentionally
    // rejects nothing.
    auto filter = [] (const Register* r) {
        return true;
    };
    if (Register* r = fFreePool.findAndRemove(scratchKey, filter)) {
        return r;
    }

    // Nothing reusable: make a fresh register carrying the scratch key so it
    // can be pooled later.
    return fInternalAllocator.make<Register>(proxy, std::move(scratchKey));
}
| 276 | |
// Remove any intervals that end before the current index. Add their registers
// to the free pool if possible.
// Active intervals are sorted by increasing end, so we only need to pop from
// the head. Expired intervals migrate to fFinishedIntvls for the
// instantiation pass at the end of assign().
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* intvl = fActiveIntvls.popHead();
        SkASSERT(!intvl->next());

        // Intervals for already-instantiated or lazy proxies have no register.
        Register* r = intvl->getRegister();
        if (r && r->isRecyclable(*fResourceProvider->caps(), intvl->proxy(), intvl->uses())) {
#if GR_ALLOCATION_SPEW
            SkDebugf("putting register %d back into pool\n", r->uniqueID());
#endif
            // TODO: fix this insertion so we get a more LRU-ish behavior
            fFreePool.insert(r->scratchKey(), r);
        }
        fFinishedIntvls.insertByIncreasingStart(intvl);
    }
}
| 295 | |
// Two-phase assignment. Phase 1 sweeps the intervals in start order,
// assigning a register to each proxy that needs one and recycling registers
// through the free pool as intervals expire. Phase 2 walks the finished
// intervals and actually instantiates surfaces (or runs lazy instantiation).
// Returns false if any instantiation failed (including earlier failures
// latched in fFailedInstantiation).
bool GrResourceAllocator::assign() {
    fIntvlHash.reset(); // we don't need the interval hash anymore

    SkDEBUGCODE(fAssigned = true;)

    if (fIntvlList.empty()) {
        return !fFailedInstantiation;          // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning %d ops\n", fNumOps);
    this->dumpIntervals();
#endif

    // Phase 1: plan register assignments without creating any GPU resources.
    while (Interval* cur = fIntvlList.popHead()) {
        // Retire every active interval that ends before this one starts.
        this->expire(cur->start());

        // Already-instantiated proxies and lazy proxies don't use registers.
        // No need to compute scratch keys (or CANT, in the case of fully-lazy).
        if (cur->proxy()->isInstantiated() || cur->proxy()->isLazy()) {
            fActiveIntvls.insertByIncreasingEnd(cur);

            continue;
        }

        Register* r = this->findOrCreateRegisterFor(cur->proxy());
#if GR_ALLOCATION_SPEW
        SkDebugf("Assigning register %d to %d\n",
                 r->uniqueID(),
                 cur->proxy()->uniqueID().asUInt());
#endif
        SkASSERT(!cur->proxy()->peekSurface());
        cur->setRegister(r);

        fActiveIntvls.insertByIncreasingEnd(cur);
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());

    // TODO: Return here and give the caller a chance to estimate memory cost and bail before
    // instantiating anything.

    // Phase 2: instantiate surfaces. Stop at the first failure; the flag is
    // sticky and reported to the caller.
    while (Interval* cur = fFinishedIntvls.popHead()) {
        if (fFailedInstantiation) {
            break;
        }
        if (cur->proxy()->isInstantiated()) {
            continue;
        }
        if (cur->proxy()->isLazy()) {
            fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(fResourceProvider);
            continue;
        }
        Register* r = cur->getRegister();
        SkASSERT(r);
        fFailedInstantiation = !r->instantiateSurface(cur->proxy(), fResourceProvider);
    }
    return !fFailedInstantiation;
}
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 357 | |
| 358 | #if GR_ALLOCATION_SPEW |
// Debug spew (compiled only under GR_ALLOCATION_SPEW): lists every pending
// interval with its proxy/backing IDs and ref counts, then draws an ASCII
// occupancy chart of each interval over the full [min, max] op range.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 // -1 marks a proxy with no backing surface yet.
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
| 391 | #endif |