Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 8 | #include "src/gpu/GrResourceAllocator.h" |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 9 | |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 10 | #include "src/gpu/GrGpuResourcePriv.h" |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 11 | #include "src/gpu/GrOpsTask.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 12 | #include "src/gpu/GrRenderTargetProxy.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 13 | #include "src/gpu/GrResourceProvider.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 14 | #include "src/gpu/GrSurfaceProxy.h" |
Mike Klein | c0bd9f9 | 2019-04-23 12:05:21 -0500 | [diff] [blame] | 15 | #include "src/gpu/GrSurfaceProxyPriv.h" |
Greg Daniel | f91aeb2 | 2019-06-18 09:58:02 -0400 | [diff] [blame] | 16 | #include "src/gpu/GrTextureProxy.h" |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 17 | |
#if GR_TRACK_INTERVAL_CREATION
#include <atomic>

// Returns a process-wide unique, nonzero ID for debugging interval lifetimes.
// Only compiled in when interval-creation tracking is enabled.
uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
    static std::atomic<uint32_t> nextID{1};
    uint32_t id;
    do {
        // Relaxed ordering suffices: we only need uniqueness, not synchronization.
        id = nextID.fetch_add(1, std::memory_order_relaxed);
    } while (id == SK_InvalidUniqueID);  // skip the reserved invalid sentinel on wraparound
    return id;
}
#endif
| 30 | |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 31 | void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) { |
| 32 | SkASSERT(!fAssignedSurface); |
| 33 | fAssignedSurface = s; |
| 34 | fProxy->priv().assign(std::move(s)); |
| 35 | } |
| 36 | |
Robert Phillips | c73666f | 2019-04-24 08:49:48 -0400 | [diff] [blame] | 37 | void GrResourceAllocator::determineRecyclability() { |
| 38 | for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) { |
| 39 | if (cur->proxy()->canSkipResourceAllocator()) { |
| 40 | // These types of proxies can slip in here if they require a stencil buffer |
| 41 | continue; |
| 42 | } |
| 43 | |
Brian Salomon | 557e812 | 2019-10-24 10:37:08 -0400 | [diff] [blame] | 44 | if (!cur->proxy()->refCntGreaterThan(cur->uses())) { |
Robert Phillips | c73666f | 2019-04-24 08:49:48 -0400 | [diff] [blame] | 45 | // All the refs on the proxy are known to the resource allocator thus no one |
| 46 | // should be holding onto it outside of Ganesh. |
Robert Phillips | c73666f | 2019-04-24 08:49:48 -0400 | [diff] [blame] | 47 | cur->markAsRecyclable(); |
| 48 | } |
| 49 | } |
| 50 | } |
| 51 | |
// Records the first op index of the opsTask after 'opsTaskIndex' (i.e. one
// past the last op of the task being closed). Must be called once per
// opsTask, in index order, before assign() runs.
void GrResourceAllocator::markEndOfOpsTask(int opsTaskIndex) {
    SkASSERT(!fAssigned); // We shouldn't be adding any opsTasks after (or during) assignment

    // Each opsTask is recorded exactly once and in order.
    SkASSERT(fEndOfOpsTaskOpIndices.count() == opsTaskIndex);
    if (!fEndOfOpsTaskOpIndices.empty()) {
        // End-of-task op indices must be strictly increasing.
        SkASSERT(fEndOfOpsTaskOpIndices.back() < this->curOp());
    }

    // This is the first op index of the next opsTask
    fEndOfOpsTaskOpIndices.push_back(this->curOp());
    SkASSERT(fEndOfOpsTaskOpIndices.count() <= fNumOpsTasks);
}
| 64 | |
// By destruction time every interval must have been assigned and expired,
// and the interval hash must have been cleared (see assign()).
GrResourceAllocator::~GrResourceAllocator() {
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
| 70 | |
// Adds (or extends) the usage interval [start, end] for 'proxy'. 'actualUse'
// distinguishes real uses — which count toward the proxy's use tally used by
// determineRecyclability() — from bookkeeping-only registrations. In debug
// builds 'isDirectDstRead' relaxes the ordering asserts for reads from the
// render target itself.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    // Proxies that can skip allocation need no interval at all.
    if (proxy->canSkipResourceAllocator()) {
        return;
    }

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
            // Record the failure; assign() will surface it via AssignError.
            fLazyInstantiationError = true;
        } else {
            // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
            // must already be instantiated or it must be a lazy proxy that we instantiated above.
            SkASSERT(proxy->isInstantiated());
        }
        return;
    }
    if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
        // Revise the interval for an existing use
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times
            // in a single opsTasks' list and as uploads in several opsTasks.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing
            // interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        if (ActualUse::kYes == actualUse) {
            intvl->addUse();
        }
        intvl->extendEnd(end);
        return;
    }
    // First use of this proxy: grab a recycled interval node if one is
    // available, otherwise allocate a fresh one from the arena.
    Interval* newIntvl;
    if (fFreeIntervalList) {
        newIntvl = fFreeIntervalList;
        fFreeIntervalList = newIntvl->next();
        newIntvl->setNext(nullptr);
        newIntvl->resetTo(proxy, start, end);
    } else {
        newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
    }

    if (ActualUse::kYes == actualUse) {
        newIntvl->addUse();
    }
    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.add(newIntvl);
}
| 132 | |
| 133 | GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 134 | SkDEBUGCODE(this->validate()); |
| 135 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 136 | Interval* temp = fHead; |
| 137 | if (temp) { |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 138 | fHead = temp->next(); |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 139 | if (!fHead) { |
| 140 | fTail = nullptr; |
| 141 | } |
| 142 | temp->setNext(nullptr); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 143 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 144 | |
| 145 | SkDEBUGCODE(this->validate()); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 146 | return temp; |
| 147 | } |
| 148 | |
| 149 | // TODO: fuse this with insertByIncreasingEnd |
| 150 | void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 151 | SkDEBUGCODE(this->validate()); |
| 152 | SkASSERT(!intvl->next()); |
| 153 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 154 | if (!fHead) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 155 | // 14% |
| 156 | fHead = fTail = intvl; |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 157 | } else if (intvl->start() <= fHead->start()) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 158 | // 3% |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 159 | intvl->setNext(fHead); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 160 | fHead = intvl; |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 161 | } else if (fTail->start() <= intvl->start()) { |
| 162 | // 83% |
| 163 | fTail->setNext(intvl); |
| 164 | fTail = intvl; |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 165 | } else { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 166 | // almost never |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 167 | Interval* prev = fHead; |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 168 | Interval* next = prev->next(); |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 169 | for (; intvl->start() > next->start(); prev = next, next = next->next()) { |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 170 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 171 | |
| 172 | SkASSERT(next); |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 173 | intvl->setNext(next); |
| 174 | prev->setNext(intvl); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 175 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 176 | |
| 177 | SkDEBUGCODE(this->validate()); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 178 | } |
| 179 | |
| 180 | // TODO: fuse this with insertByIncreasingStart |
| 181 | void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 182 | SkDEBUGCODE(this->validate()); |
| 183 | SkASSERT(!intvl->next()); |
| 184 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 185 | if (!fHead) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 186 | // 14% |
| 187 | fHead = fTail = intvl; |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 188 | } else if (intvl->end() <= fHead->end()) { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 189 | // 64% |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 190 | intvl->setNext(fHead); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 191 | fHead = intvl; |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 192 | } else if (fTail->end() <= intvl->end()) { |
| 193 | // 3% |
| 194 | fTail->setNext(intvl); |
| 195 | fTail = intvl; |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 196 | } else { |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 197 | // 19% but 81% of those land right after the list's head |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 198 | Interval* prev = fHead; |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 199 | Interval* next = prev->next(); |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 200 | for (; intvl->end() > next->end(); prev = next, next = next->next()) { |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 201 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 202 | |
| 203 | SkASSERT(next); |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 204 | intvl->setNext(next); |
| 205 | prev->setNext(intvl); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 206 | } |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 207 | |
| 208 | SkDEBUGCODE(this->validate()); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 209 | } |
| 210 | |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 211 | #ifdef SK_DEBUG |
| 212 | void GrResourceAllocator::IntervalList::validate() const { |
| 213 | SkASSERT(SkToBool(fHead) == SkToBool(fTail)); |
| 214 | |
| 215 | Interval* prev = nullptr; |
| 216 | for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) { |
| 217 | } |
| 218 | |
| 219 | SkASSERT(fTail == prev); |
| 220 | } |
| 221 | #endif |
Robert Phillips | 4150eea | 2018-02-07 17:08:21 -0500 | [diff] [blame] | 222 | |
| 223 | GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() { |
| 224 | Interval* tmp = fHead; |
| 225 | fHead = nullptr; |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 226 | fTail = nullptr; |
Robert Phillips | 4150eea | 2018-02-07 17:08:21 -0500 | [diff] [blame] | 227 | return tmp; |
| 228 | } |
| 229 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 230 | // 'surface' can be reused. Add it back to the free pool. |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 231 | void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) { |
Robert Phillips | 57aa367 | 2017-07-21 11:38:13 -0400 | [diff] [blame] | 232 | const GrScratchKey &key = surface->resourcePriv().getScratchKey(); |
| 233 | |
| 234 | if (!key.isValid()) { |
| 235 | return; // can't do it w/o a valid scratch key |
| 236 | } |
| 237 | |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 238 | if (surface->getUniqueKey().isValid()) { |
| 239 | // If the surface has a unique key we throw it back into the resource cache. |
| 240 | // If things get really tight 'findSurfaceFor' may pull it back out but there is |
| 241 | // no need to have it in tight rotation. |
| 242 | return; |
| 243 | } |
| 244 | |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 245 | #if GR_ALLOCATION_SPEW |
| 246 | SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt()); |
| 247 | #endif |
Robert Phillips | 57aa367 | 2017-07-21 11:38:13 -0400 | [diff] [blame] | 248 | // TODO: fix this insertion so we get a more LRU-ish behavior |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 249 | fFreePool.insert(key, surface.release()); |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 250 | } |
| 251 | |
// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a useable one, create a new one.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy) {
    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        if (sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                proxy->asTextureProxy()->getUniqueKey())) {
            return surface;
        }
    }

    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(*fResourceProvider->caps(), &key);

    // Any pooled surface with a matching scratch key is acceptable.
    auto filter = [] (const GrSurface* s) {
        return true;
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }
        // Uniquely keyed surfaces never enter the pool (see recycleSurface()).
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}
| 286 | |
| 287 | // Remove any intervals that end before the current index. Return their GrSurfaces |
Robert Phillips | 3966738 | 2019-04-17 16:03:30 -0400 | [diff] [blame] | 288 | // to the free pool if possible. |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 289 | void GrResourceAllocator::expire(unsigned int curIndex) { |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 290 | while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) { |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 291 | Interval* temp = fActiveIntvls.popHead(); |
Robert Phillips | df25e3a | 2018-08-08 12:48:40 -0400 | [diff] [blame] | 292 | SkASSERT(!temp->next()); |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 293 | |
| 294 | if (temp->wasAssignedSurface()) { |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 295 | sk_sp<GrSurface> surface = temp->detachSurface(); |
| 296 | |
Robert Phillips | c73666f | 2019-04-24 08:49:48 -0400 | [diff] [blame] | 297 | if (temp->isRecyclable()) { |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 298 | this->recycleSurface(std::move(surface)); |
| 299 | } |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 300 | } |
Robert Phillips | 8186cbe | 2017-11-01 17:32:39 -0400 | [diff] [blame] | 301 | |
| 302 | // Add temp to the free interval list so it can be reused |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 303 | SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 304 | temp->setNext(fFreeIntervalList); |
Robert Phillips | 8186cbe | 2017-11-01 17:32:39 -0400 | [diff] [blame] | 305 | fFreeIntervalList = temp; |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 306 | } |
| 307 | } |
| 308 | |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 309 | bool GrResourceAllocator::onOpsTaskBoundary() const { |
Robert Phillips | c476e5d | 2019-03-26 14:50:08 -0400 | [diff] [blame] | 310 | if (fIntvlList.empty()) { |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 311 | SkASSERT(fCurOpsTaskIndex+1 <= fNumOpsTasks); |
| 312 | // Although technically on an opsTask boundary there is no need to force an |
Robert Phillips | c476e5d | 2019-03-26 14:50:08 -0400 | [diff] [blame] | 313 | // intermediate flush here |
| 314 | return false; |
| 315 | } |
| 316 | |
| 317 | const Interval* tmp = fIntvlList.peekHead(); |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 318 | return fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= tmp->start(); |
Robert Phillips | c476e5d | 2019-03-26 14:50:08 -0400 | [diff] [blame] | 319 | } |
| 320 | |
// Truncates the current assignment at the next opsTask boundary (reporting it
// via '*stopIndex') and expires intervals that will not survive the partial
// flush. Only call when onOpsTaskBoundary() is true.
void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
    *stopIndex = fCurOpsTaskIndex+1;

    // This is interrupting the allocation of resources for this flush. We need to
    // proactively clear the active interval list of any intervals that aren't
    // guaranteed to survive the partial flush lest they become zombies (i.e.,
    // holding a deleted surface proxy).
    const Interval* tmp = fIntvlList.peekHead();
    SkASSERT(fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= tmp->start());

    fCurOpsTaskIndex++;
    SkASSERT(fCurOpsTaskIndex < fNumOpsTasks);

    this->expire(tmp->start());
}
| 336 | |
Brian Salomon | 577aa0f | 2018-11-30 13:32:23 -0500 | [diff] [blame] | 337 | bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) { |
Greg Daniel | aa3dfbe | 2018-01-29 10:34:25 -0500 | [diff] [blame] | 338 | SkASSERT(outError); |
Robert Phillips | 82774f8 | 2019-06-20 14:38:27 -0400 | [diff] [blame] | 339 | *outError = fLazyInstantiationError ? AssignError::kFailedProxyInstantiation |
| 340 | : AssignError::kNoError; |
Greg Daniel | aa3dfbe | 2018-01-29 10:34:25 -0500 | [diff] [blame] | 341 | |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 342 | SkASSERT(fNumOpsTasks == fEndOfOpsTaskOpIndices.count()); |
Mike Klein | 6350cb0 | 2019-04-22 12:09:45 +0000 | [diff] [blame] | 343 | |
Robert Phillips | 5f78adf | 2019-04-22 12:41:39 -0400 | [diff] [blame] | 344 | fIntvlHash.reset(); // we don't need the interval hash anymore |
| 345 | |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 346 | if (fCurOpsTaskIndex >= fEndOfOpsTaskOpIndices.count()) { |
Robert Phillips | 5f78adf | 2019-04-22 12:41:39 -0400 | [diff] [blame] | 347 | return false; // nothing to render |
| 348 | } |
| 349 | |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 350 | *startIndex = fCurOpsTaskIndex; |
| 351 | *stopIndex = fEndOfOpsTaskOpIndices.count(); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 352 | |
Robert Phillips | 5f78adf | 2019-04-22 12:41:39 -0400 | [diff] [blame] | 353 | if (fIntvlList.empty()) { |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 354 | fCurOpsTaskIndex = fEndOfOpsTaskOpIndices.count(); |
Robert Phillips | 5f78adf | 2019-04-22 12:41:39 -0400 | [diff] [blame] | 355 | return true; // no resources to assign |
| 356 | } |
| 357 | |
Robert Phillips | 3bf3d4a | 2019-03-27 07:09:09 -0400 | [diff] [blame] | 358 | #if GR_ALLOCATION_SPEW |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 359 | SkDebugf("assigning opsTasks %d through %d out of %d numOpsTasks\n", |
| 360 | *startIndex, *stopIndex, fNumOpsTasks); |
| 361 | SkDebugf("EndOfOpsTaskIndices: "); |
| 362 | for (int i = 0; i < fEndOfOpsTaskOpIndices.count(); ++i) { |
| 363 | SkDebugf("%d ", fEndOfOpsTaskOpIndices[i]); |
Robert Phillips | 3bf3d4a | 2019-03-27 07:09:09 -0400 | [diff] [blame] | 364 | } |
| 365 | SkDebugf("\n"); |
| 366 | #endif |
| 367 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 368 | SkDEBUGCODE(fAssigned = true;) |
| 369 | |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 370 | #if GR_ALLOCATION_SPEW |
| 371 | this->dumpIntervals(); |
| 372 | #endif |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 373 | while (Interval* cur = fIntvlList.popHead()) { |
Greg Daniel | d72dd4d | 2019-08-29 14:37:46 -0400 | [diff] [blame] | 374 | while (fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= cur->start()) { |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 375 | fCurOpsTaskIndex++; |
| 376 | SkASSERT(fCurOpsTaskIndex < fNumOpsTasks); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 377 | } |
| 378 | |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 379 | this->expire(cur->start()); |
Robert Phillips | 57aa367 | 2017-07-21 11:38:13 -0400 | [diff] [blame] | 380 | |
Brian Salomon | fd98c2c | 2018-07-31 17:25:29 -0400 | [diff] [blame] | 381 | if (cur->proxy()->isInstantiated()) { |
Robert Phillips | 57aa367 | 2017-07-21 11:38:13 -0400 | [diff] [blame] | 382 | fActiveIntvls.insertByIncreasingEnd(cur); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 383 | |
| 384 | if (fResourceProvider->overBudget()) { |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 385 | // Only force intermediate draws on opsTask boundaries |
| 386 | if (this->onOpsTaskBoundary()) { |
Robert Phillips | c476e5d | 2019-03-26 14:50:08 -0400 | [diff] [blame] | 387 | this->forceIntermediateFlush(stopIndex); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 388 | return true; |
| 389 | } |
| 390 | } |
| 391 | |
Robert Phillips | 57aa367 | 2017-07-21 11:38:13 -0400 | [diff] [blame] | 392 | continue; |
| 393 | } |
| 394 | |
Brian Salomon | beb7f52 | 2019-08-30 16:19:42 -0400 | [diff] [blame] | 395 | if (cur->proxy()->isLazy()) { |
Greg Daniel | aa3dfbe | 2018-01-29 10:34:25 -0500 | [diff] [blame] | 396 | if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) { |
| 397 | *outError = AssignError::kFailedProxyInstantiation; |
| 398 | } |
Chris Dalton | 0b68dda | 2019-11-07 21:08:03 -0700 | [diff] [blame] | 399 | } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy())) { |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 400 | // TODO: make getUniqueKey virtual on GrSurfaceProxy |
Robert Phillips | 0790f8a | 2018-09-18 13:11:03 -0400 | [diff] [blame] | 401 | GrTextureProxy* texProxy = cur->proxy()->asTextureProxy(); |
| 402 | |
| 403 | if (texProxy && texProxy->getUniqueKey().isValid()) { |
| 404 | if (!surface->getUniqueKey().isValid()) { |
| 405 | fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(), |
| 406 | surface.get()); |
| 407 | } |
| 408 | SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey()); |
Robert Phillips | f8e2502 | 2017-11-08 15:24:31 -0500 | [diff] [blame] | 409 | } |
| 410 | |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 411 | #if GR_ALLOCATION_SPEW |
| 412 | SkDebugf("Assigning %d to %d\n", |
| 413 | surface->uniqueID().asUInt(), |
| 414 | cur->proxy()->uniqueID().asUInt()); |
| 415 | #endif |
| 416 | |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 417 | cur->assign(std::move(surface)); |
Greg Daniel | aa3dfbe | 2018-01-29 10:34:25 -0500 | [diff] [blame] | 418 | } else { |
Brian Salomon | fd98c2c | 2018-07-31 17:25:29 -0400 | [diff] [blame] | 419 | SkASSERT(!cur->proxy()->isInstantiated()); |
Greg Daniel | aa3dfbe | 2018-01-29 10:34:25 -0500 | [diff] [blame] | 420 | *outError = AssignError::kFailedProxyInstantiation; |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 421 | } |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 422 | |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 423 | fActiveIntvls.insertByIncreasingEnd(cur); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 424 | |
| 425 | if (fResourceProvider->overBudget()) { |
Greg Daniel | f41b2bd | 2019-08-22 16:19:24 -0400 | [diff] [blame] | 426 | // Only force intermediate draws on opsTask boundaries |
| 427 | if (this->onOpsTaskBoundary()) { |
Robert Phillips | c476e5d | 2019-03-26 14:50:08 -0400 | [diff] [blame] | 428 | this->forceIntermediateFlush(stopIndex); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 429 | return true; |
| 430 | } |
| 431 | } |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 432 | } |
Robert Phillips | 5b65a84 | 2017-11-13 15:48:12 -0500 | [diff] [blame] | 433 | |
| 434 | // expire all the remaining intervals to drain the active interval list |
| 435 | this->expire(std::numeric_limits<unsigned int>::max()); |
Robert Phillips | eafd48a | 2017-11-16 07:52:08 -0500 | [diff] [blame] | 436 | return true; |
Robert Phillips | 5af44de | 2017-07-18 14:49:38 -0400 | [diff] [blame] | 437 | } |
Robert Phillips | 715d08c | 2018-07-18 13:56:48 -0400 | [diff] [blame] | 438 | |
#if GR_ALLOCATION_SPEW
// Debug-only: prints every pending interval (proxy ID, backing surface ID,
// ref counts) followed by an ASCII timeline of their op-index ranges.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - refProxys:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = std::min(min, cur->start());
        max = std::max(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");   // op index falls inside this interval
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif