blob: 6ab2bf04886301f72e47bb5dde59be8736f2b0ef [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf41b2bd2019-08-22 16:19:24 -040011#include "src/gpu/GrOpsTask.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050013#include "src/gpu/GrResourceProvider.h"
14#include "src/gpu/GrSurfacePriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040015#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050016#include "src/gpu/GrSurfaceProxyPriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040017#include "src/gpu/GrTextureProxy.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040018
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Debug-only: hands out process-wide unique IDs for Intervals. The loop
    // skips SK_InvalidUniqueID so the sentinel is never issued, even after
    // the 32-bit counter wraps.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> gNextID{1};
        uint32_t candidate;
        do {
            candidate = gNextID++;
        } while (candidate == SK_InvalidUniqueID);
        return candidate;
    }
#endif
31
Robert Phillips5b65a842017-11-13 15:48:12 -050032void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
33 SkASSERT(!fAssignedSurface);
34 fAssignedSurface = s;
35 fProxy->priv().assign(std::move(s));
36}
37
Robert Phillipsc73666f2019-04-24 08:49:48 -040038void GrResourceAllocator::determineRecyclability() {
39 for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
40 if (cur->proxy()->canSkipResourceAllocator()) {
41 // These types of proxies can slip in here if they require a stencil buffer
42 continue;
43 }
44
Brian Salomona036f0d2019-08-29 11:16:04 -040045 if (cur->uses() >= cur->proxy()->refCnt()) {
Robert Phillipsc73666f2019-04-24 08:49:48 -040046 // All the refs on the proxy are known to the resource allocator thus no one
47 // should be holding onto it outside of Ganesh.
Brian Salomona036f0d2019-08-29 11:16:04 -040048 SkASSERT(cur->uses() == cur->proxy()->refCnt());
Robert Phillipsc73666f2019-04-24 08:49:48 -040049 cur->markAsRecyclable();
50 }
51 }
52}
53
Greg Danielf41b2bd2019-08-22 16:19:24 -040054void GrResourceAllocator::markEndOfOpsTask(int opsTaskIndex) {
55 SkASSERT(!fAssigned); // We shouldn't be adding any opsTasks after (or during) assignment
Robert Phillipseafd48a2017-11-16 07:52:08 -050056
Greg Danielf41b2bd2019-08-22 16:19:24 -040057 SkASSERT(fEndOfOpsTaskOpIndices.count() == opsTaskIndex);
58 if (!fEndOfOpsTaskOpIndices.empty()) {
59 SkASSERT(fEndOfOpsTaskOpIndices.back() < this->curOp());
Robert Phillipseafd48a2017-11-16 07:52:08 -050060 }
61
Greg Danielf41b2bd2019-08-22 16:19:24 -040062 // This is the first op index of the next opsTask
63 fEndOfOpsTaskOpIndices.push_back(this->curOp());
64 SkASSERT(fEndOfOpsTaskOpIndices.count() <= fNumOpsTasks);
Robert Phillipseafd48a2017-11-16 07:52:08 -050065}
66
Robert Phillips5b65a842017-11-13 15:48:12 -050067GrResourceAllocator::~GrResourceAllocator() {
Robert Phillips5b65a842017-11-13 15:48:12 -050068 SkASSERT(fIntvlList.empty());
69 SkASSERT(fActiveIntvls.empty());
70 SkASSERT(!fIntvlHash.count());
Robert Phillips5b65a842017-11-13 15:48:12 -050071}
72
Robert Phillipsc73666f2019-04-24 08:49:48 -040073void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
74 ActualUse actualUse
Chris Dalton8816b932017-11-29 16:48:25 -070075 SkDEBUGCODE(, bool isDirectDstRead)) {
Brian Salomonbeb7f522019-08-30 16:19:42 -040076 SkASSERT(start <= end);
77 SkASSERT(!fAssigned); // We shouldn't be adding any intervals after (or during) assignment
Robert Phillips5f78adf2019-04-22 12:41:39 -040078
Chris Dalton97155592019-06-13 13:40:20 -060079 if (proxy->canSkipResourceAllocator()) {
Chris Daltoneffee202019-07-01 22:28:03 -060080 // If the proxy is still not instantiated at this point but will need stencil, it will
81 // attach its own stencil buffer upon onFlush instantiation.
82 if (proxy->isInstantiated()) {
Brian Salomonbeb7f522019-08-30 16:19:42 -040083 auto rt = proxy->asRenderTargetProxy();
84 int minStencilSampleCount = rt ? rt->numStencilSamples() : 0;
Chris Daltoneffee202019-07-01 22:28:03 -060085 if (minStencilSampleCount) {
86 if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
87 fResourceProvider, proxy->peekSurface(), minStencilSampleCount)) {
88 SkDebugf("WARNING: failed to attach stencil buffer. "
89 "Rendering may be incorrect.\n");
90 }
Chris Dalton97155592019-06-13 13:40:20 -060091 }
92 }
Robert Phillips5f78adf2019-04-22 12:41:39 -040093 return;
94 }
95
Brian Salomon9cadc312018-12-05 15:09:19 -050096 // If a proxy is read only it must refer to a texture with specific content that cannot be
97 // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
98 // with the same texture.
99 if (proxy->readOnly()) {
Brian Salomonbeb7f522019-08-30 16:19:42 -0400100 if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(fResourceProvider)) {
101 fLazyInstantiationError = true;
Brian Salomon9cadc312018-12-05 15:09:19 -0500102 } else {
Brian Salomonbeb7f522019-08-30 16:19:42 -0400103 // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
104 // must already be instantiated or it must be a lazy proxy that we instantiated above.
105 SkASSERT(proxy->isInstantiated());
Brian Salomon9cadc312018-12-05 15:09:19 -0500106 }
Brian Salomonbeb7f522019-08-30 16:19:42 -0400107 return;
108 }
109 if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
110 // Revise the interval for an existing use
111#ifdef SK_DEBUG
112 if (0 == start && 0 == end) {
113 // This interval is for the initial upload to a deferred proxy. Due to the vagaries
114 // of how deferred proxies are collected they can appear as uploads multiple times
115 // in a single opsTasks' list and as uploads in several opsTasks.
116 SkASSERT(0 == intvl->start());
117 } else if (isDirectDstRead) {
118 // Direct reads from the render target itself should occur w/in the existing
119 // interval
120 SkASSERT(intvl->start() <= start && intvl->end() >= end);
121 } else {
122 SkASSERT(intvl->end() <= start && intvl->end() <= end);
123 }
124#endif
Robert Phillipsc73666f2019-04-24 08:49:48 -0400125 if (ActualUse::kYes == actualUse) {
Brian Salomonbeb7f522019-08-30 16:19:42 -0400126 intvl->addUse();
Robert Phillipsc73666f2019-04-24 08:49:48 -0400127 }
Brian Salomonbeb7f522019-08-30 16:19:42 -0400128 intvl->extendEnd(end);
129 return;
130 }
131 Interval* newIntvl;
132 if (fFreeIntervalList) {
133 newIntvl = fFreeIntervalList;
134 fFreeIntervalList = newIntvl->next();
135 newIntvl->setNext(nullptr);
136 newIntvl->resetTo(proxy, start, end);
137 } else {
138 newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
Brian Salomonc6093532018-12-05 21:34:36 +0000139 }
140
Brian Salomonbeb7f522019-08-30 16:19:42 -0400141 if (ActualUse::kYes == actualUse) {
142 newIntvl->addUse();
Chris Dalton706a6ff2017-11-29 22:01:06 -0700143 }
Brian Salomonbeb7f522019-08-30 16:19:42 -0400144 fIntvlList.insertByIncreasingStart(newIntvl);
145 fIntvlHash.add(newIntvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400146}
147
148GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400149 SkDEBUGCODE(this->validate());
150
Robert Phillips5af44de2017-07-18 14:49:38 -0400151 Interval* temp = fHead;
152 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500153 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400154 if (!fHead) {
155 fTail = nullptr;
156 }
157 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400158 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400159
160 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400161 return temp;
162}
163
164// TODO: fuse this with insertByIncreasingEnd
165void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400166 SkDEBUGCODE(this->validate());
167 SkASSERT(!intvl->next());
168
Robert Phillips5af44de2017-07-18 14:49:38 -0400169 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400170 // 14%
171 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500172 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400173 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500174 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400175 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400176 } else if (fTail->start() <= intvl->start()) {
177 // 83%
178 fTail->setNext(intvl);
179 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400180 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400181 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400182 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500183 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400184 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400185 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400186
187 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500188 intvl->setNext(next);
189 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400190 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400191
192 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400193}
194
195// TODO: fuse this with insertByIncreasingStart
196void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400197 SkDEBUGCODE(this->validate());
198 SkASSERT(!intvl->next());
199
Robert Phillips5af44de2017-07-18 14:49:38 -0400200 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400201 // 14%
202 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500203 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400204 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500205 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400206 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400207 } else if (fTail->end() <= intvl->end()) {
208 // 3%
209 fTail->setNext(intvl);
210 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400211 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400212 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400213 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500214 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400215 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400216 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400217
218 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500219 intvl->setNext(next);
220 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400221 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400222
223 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400224}
225
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400226#ifdef SK_DEBUG
227void GrResourceAllocator::IntervalList::validate() const {
228 SkASSERT(SkToBool(fHead) == SkToBool(fTail));
229
230 Interval* prev = nullptr;
231 for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) {
232 }
233
234 SkASSERT(fTail == prev);
235}
236#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500237
238 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
239 Interval* tmp = fHead;
240 fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400241 fTail = nullptr;
Robert Phillips4150eea2018-02-07 17:08:21 -0500242 return tmp;
243}
244
Robert Phillips5af44de2017-07-18 14:49:38 -0400245// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400246void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400247 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
248
249 if (!key.isValid()) {
250 return; // can't do it w/o a valid scratch key
251 }
252
Robert Phillipsf8e25022017-11-08 15:24:31 -0500253 if (surface->getUniqueKey().isValid()) {
254 // If the surface has a unique key we throw it back into the resource cache.
255 // If things get really tight 'findSurfaceFor' may pull it back out but there is
256 // no need to have it in tight rotation.
257 return;
258 }
259
Robert Phillips715d08c2018-07-18 13:56:48 -0400260#if GR_ALLOCATION_SPEW
261 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
262#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400263 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500264 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400265}
266
267// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
268// If we can't find a useable one, create a new one.
Robert Phillipseafd48a2017-11-16 07:52:08 -0500269sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
Chris Daltoneffee202019-07-01 22:28:03 -0600270 int minStencilSampleCount) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400271
272 if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
273 // First try to reattach to a cached version if the proxy is uniquely keyed
274 sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
275 proxy->asTextureProxy()->getUniqueKey());
276 if (surface) {
277 if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
Chris Daltoneffee202019-07-01 22:28:03 -0600278 minStencilSampleCount)) {
Robert Phillips0790f8a2018-09-18 13:11:03 -0400279 return nullptr;
280 }
281
282 return surface;
283 }
284 }
285
Robert Phillips57aa3672017-07-21 11:38:13 -0400286 // First look in the free pool
287 GrScratchKey key;
Robert Phillips5af44de2017-07-18 14:49:38 -0400288
Robert Phillips57aa3672017-07-21 11:38:13 -0400289 proxy->priv().computeScratchKey(&key);
290
Robert Phillips10d17212019-04-24 14:09:10 -0400291 auto filter = [] (const GrSurface* s) {
292 return true;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500293 };
294 sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
Robert Phillips57aa3672017-07-21 11:38:13 -0400295 if (surface) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500296 if (SkBudgeted::kYes == proxy->isBudgeted() &&
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500297 GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500298 // This gets the job done but isn't quite correct. It would be better to try to
Brian Salomonfa2ebea2019-01-24 15:58:58 -0500299 // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
Robert Phillipsf8e25022017-11-08 15:24:31 -0500300 surface->resourcePriv().makeBudgeted();
301 }
302
Robert Phillips01a91282018-07-26 08:03:04 -0400303 if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
Chris Daltoneffee202019-07-01 22:28:03 -0600304 minStencilSampleCount)) {
Robert Phillips01a91282018-07-26 08:03:04 -0400305 return nullptr;
306 }
Robert Phillips0790f8a2018-09-18 13:11:03 -0400307 SkASSERT(!surface->getUniqueKey().isValid());
Robert Phillipsf8e25022017-11-08 15:24:31 -0500308 return surface;
Robert Phillips57aa3672017-07-21 11:38:13 -0400309 }
310
311 // Failing that, try to grab a new one from the resource cache
Robert Phillips5af44de2017-07-18 14:49:38 -0400312 return proxy->priv().createSurface(fResourceProvider);
313}
314
315// Remove any intervals that end before the current index. Return their GrSurfaces
Robert Phillips39667382019-04-17 16:03:30 -0400316// to the free pool if possible.
Robert Phillips5af44de2017-07-18 14:49:38 -0400317void GrResourceAllocator::expire(unsigned int curIndex) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500318 while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400319 Interval* temp = fActiveIntvls.popHead();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400320 SkASSERT(!temp->next());
Robert Phillips5b65a842017-11-13 15:48:12 -0500321
322 if (temp->wasAssignedSurface()) {
Robert Phillips715d08c2018-07-18 13:56:48 -0400323 sk_sp<GrSurface> surface = temp->detachSurface();
324
Robert Phillipsc73666f2019-04-24 08:49:48 -0400325 if (temp->isRecyclable()) {
Robert Phillips715d08c2018-07-18 13:56:48 -0400326 this->recycleSurface(std::move(surface));
327 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500328 }
Robert Phillips8186cbe2017-11-01 17:32:39 -0400329
330 // Add temp to the free interval list so it can be reused
Robert Phillips715d08c2018-07-18 13:56:48 -0400331 SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
Robert Phillipsf8e25022017-11-08 15:24:31 -0500332 temp->setNext(fFreeIntervalList);
Robert Phillips8186cbe2017-11-01 17:32:39 -0400333 fFreeIntervalList = temp;
Robert Phillips5af44de2017-07-18 14:49:38 -0400334 }
335}
336
Greg Danielf41b2bd2019-08-22 16:19:24 -0400337bool GrResourceAllocator::onOpsTaskBoundary() const {
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400338 if (fIntvlList.empty()) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400339 SkASSERT(fCurOpsTaskIndex+1 <= fNumOpsTasks);
340 // Although technically on an opsTask boundary there is no need to force an
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400341 // intermediate flush here
342 return false;
343 }
344
345 const Interval* tmp = fIntvlList.peekHead();
Greg Danielf41b2bd2019-08-22 16:19:24 -0400346 return fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= tmp->start();
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400347}
348
349void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400350 *stopIndex = fCurOpsTaskIndex+1;
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400351
352 // This is interrupting the allocation of resources for this flush. We need to
353 // proactively clear the active interval list of any intervals that aren't
354 // guaranteed to survive the partial flush lest they become zombies (i.e.,
355 // holding a deleted surface proxy).
356 const Interval* tmp = fIntvlList.peekHead();
Greg Danielf41b2bd2019-08-22 16:19:24 -0400357 SkASSERT(fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= tmp->start());
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400358
Greg Danielf41b2bd2019-08-22 16:19:24 -0400359 fCurOpsTaskIndex++;
360 SkASSERT(fCurOpsTaskIndex < fNumOpsTasks);
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400361
362 this->expire(tmp->start());
363}
364
Brian Salomon577aa0f2018-11-30 13:32:23 -0500365bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500366 SkASSERT(outError);
Robert Phillips82774f82019-06-20 14:38:27 -0400367 *outError = fLazyInstantiationError ? AssignError::kFailedProxyInstantiation
368 : AssignError::kNoError;
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500369
Greg Danielf41b2bd2019-08-22 16:19:24 -0400370 SkASSERT(fNumOpsTasks == fEndOfOpsTaskOpIndices.count());
Mike Klein6350cb02019-04-22 12:09:45 +0000371
Robert Phillips5f78adf2019-04-22 12:41:39 -0400372 fIntvlHash.reset(); // we don't need the interval hash anymore
373
Greg Danielf41b2bd2019-08-22 16:19:24 -0400374 if (fCurOpsTaskIndex >= fEndOfOpsTaskOpIndices.count()) {
Robert Phillips5f78adf2019-04-22 12:41:39 -0400375 return false; // nothing to render
376 }
377
Greg Danielf41b2bd2019-08-22 16:19:24 -0400378 *startIndex = fCurOpsTaskIndex;
379 *stopIndex = fEndOfOpsTaskOpIndices.count();
Robert Phillipseafd48a2017-11-16 07:52:08 -0500380
Robert Phillips5f78adf2019-04-22 12:41:39 -0400381 if (fIntvlList.empty()) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400382 fCurOpsTaskIndex = fEndOfOpsTaskOpIndices.count();
Robert Phillips5f78adf2019-04-22 12:41:39 -0400383 return true; // no resources to assign
384 }
385
Robert Phillips3bf3d4a2019-03-27 07:09:09 -0400386#if GR_ALLOCATION_SPEW
Greg Danielf41b2bd2019-08-22 16:19:24 -0400387 SkDebugf("assigning opsTasks %d through %d out of %d numOpsTasks\n",
388 *startIndex, *stopIndex, fNumOpsTasks);
389 SkDebugf("EndOfOpsTaskIndices: ");
390 for (int i = 0; i < fEndOfOpsTaskOpIndices.count(); ++i) {
391 SkDebugf("%d ", fEndOfOpsTaskOpIndices[i]);
Robert Phillips3bf3d4a2019-03-27 07:09:09 -0400392 }
393 SkDebugf("\n");
394#endif
395
Robert Phillips5af44de2017-07-18 14:49:38 -0400396 SkDEBUGCODE(fAssigned = true;)
397
Robert Phillips715d08c2018-07-18 13:56:48 -0400398#if GR_ALLOCATION_SPEW
399 this->dumpIntervals();
400#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400401 while (Interval* cur = fIntvlList.popHead()) {
Greg Danield72dd4d2019-08-29 14:37:46 -0400402 while (fEndOfOpsTaskOpIndices[fCurOpsTaskIndex] <= cur->start()) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400403 fCurOpsTaskIndex++;
404 SkASSERT(fCurOpsTaskIndex < fNumOpsTasks);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500405 }
406
Robert Phillipsf8e25022017-11-08 15:24:31 -0500407 this->expire(cur->start());
Robert Phillips57aa3672017-07-21 11:38:13 -0400408
Chris Daltoneffee202019-07-01 22:28:03 -0600409 int minStencilSampleCount = (cur->proxy()->asRenderTargetProxy())
410 ? cur->proxy()->asRenderTargetProxy()->numStencilSamples()
411 : 0;
Robert Phillipseafd48a2017-11-16 07:52:08 -0500412
Brian Salomonfd98c2c2018-07-31 17:25:29 -0400413 if (cur->proxy()->isInstantiated()) {
414 if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
Chris Daltoneffee202019-07-01 22:28:03 -0600415 fResourceProvider, cur->proxy()->peekSurface(), minStencilSampleCount)) {
Robert Phillips01a91282018-07-26 08:03:04 -0400416 *outError = AssignError::kFailedProxyInstantiation;
417 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500418
Robert Phillips57aa3672017-07-21 11:38:13 -0400419 fActiveIntvls.insertByIncreasingEnd(cur);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500420
421 if (fResourceProvider->overBudget()) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400422 // Only force intermediate draws on opsTask boundaries
423 if (this->onOpsTaskBoundary()) {
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400424 this->forceIntermediateFlush(stopIndex);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500425 return true;
426 }
427 }
428
Robert Phillips57aa3672017-07-21 11:38:13 -0400429 continue;
430 }
431
Brian Salomonbeb7f522019-08-30 16:19:42 -0400432 if (cur->proxy()->isLazy()) {
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500433 if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
434 *outError = AssignError::kFailedProxyInstantiation;
435 }
Brian Salomonbeb7f522019-08-30 16:19:42 -0400436 } else if (sk_sp<GrSurface> surface =
437 this->findSurfaceFor(cur->proxy(), minStencilSampleCount)) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500438 // TODO: make getUniqueKey virtual on GrSurfaceProxy
Robert Phillips0790f8a2018-09-18 13:11:03 -0400439 GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();
440
441 if (texProxy && texProxy->getUniqueKey().isValid()) {
442 if (!surface->getUniqueKey().isValid()) {
443 fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
444 surface.get());
445 }
446 SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
Robert Phillipsf8e25022017-11-08 15:24:31 -0500447 }
448
Robert Phillips715d08c2018-07-18 13:56:48 -0400449#if GR_ALLOCATION_SPEW
450 SkDebugf("Assigning %d to %d\n",
451 surface->uniqueID().asUInt(),
452 cur->proxy()->uniqueID().asUInt());
453#endif
454
Robert Phillips5b65a842017-11-13 15:48:12 -0500455 cur->assign(std::move(surface));
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500456 } else {
Brian Salomonfd98c2c2018-07-31 17:25:29 -0400457 SkASSERT(!cur->proxy()->isInstantiated());
Greg Danielaa3dfbe2018-01-29 10:34:25 -0500458 *outError = AssignError::kFailedProxyInstantiation;
Robert Phillips5af44de2017-07-18 14:49:38 -0400459 }
Robert Phillipseafd48a2017-11-16 07:52:08 -0500460
Robert Phillips5af44de2017-07-18 14:49:38 -0400461 fActiveIntvls.insertByIncreasingEnd(cur);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500462
463 if (fResourceProvider->overBudget()) {
Greg Danielf41b2bd2019-08-22 16:19:24 -0400464 // Only force intermediate draws on opsTask boundaries
465 if (this->onOpsTaskBoundary()) {
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400466 this->forceIntermediateFlush(stopIndex);
Robert Phillipseafd48a2017-11-16 07:52:08 -0500467 return true;
468 }
469 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400470 }
Robert Phillips5b65a842017-11-13 15:48:12 -0500471
472 // expire all the remaining intervals to drain the active interval list
473 this->expire(std::numeric_limits<unsigned int>::max());
Robert Phillipseafd48a2017-11-16 07:52:08 -0500474 return true;
Robert Phillips5af44de2017-07-18 14:49:38 -0400475}
Robert Phillips715d08c2018-07-18 13:56:48 -0400476
#if GR_ALLOCATION_SPEW
// Debug aid: print every pending interval and then render an ASCII chart of
// the op-index ranges each one covers.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the useage intervals
    for (const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif