blob: f73f5be81465853bba5aba5a1654b1923043039f [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "src/gpu/GrResourceAllocator.h"
Robert Phillips5af44de2017-07-18 14:49:38 -04009
Mike Kleinc0bd9f92019-04-23 12:05:21 -050010#include "src/gpu/GrDeinstantiateProxyTracker.h"
11#include "src/gpu/GrGpuResourcePriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040012#include "src/gpu/GrOpList.h"
13#include "src/gpu/GrRenderTargetProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050014#include "src/gpu/GrResourceCache.h"
15#include "src/gpu/GrResourceProvider.h"
16#include "src/gpu/GrSurfacePriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040017#include "src/gpu/GrSurfaceProxy.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/gpu/GrSurfaceProxyPriv.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040019#include "src/gpu/GrTextureProxy.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040020
#if GR_TRACK_INTERVAL_CREATION
    #include <atomic>

    // Debug-only helper: hands out process-wide unique IDs for Intervals so their
    // creation/lifetime can be traced.
    uint32_t GrResourceAllocator::Interval::CreateUniqueID() {
        static std::atomic<uint32_t> nextID{1};
        uint32_t id;
        do {
            // Skip the sentinel value so an Interval never reports an "invalid" ID,
            // even after the 32-bit counter wraps.
            id = nextID++;
        } while (id == SK_InvalidUniqueID);
        return id;
    }
#endif
33
// Bind the GrSurface chosen for this interval's proxy. The interval retains its
// own ref ('fAssignedSurface') so the surface can be recycled into the free pool
// when the interval expires.
void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);
    fAssignedSurface = s;                 // copy: keep one ref for later recycling
    fProxy->priv().assign(std::move(s));  // move the remaining ref into the proxy
}
39
Robert Phillipsc73666f2019-04-24 08:49:48 -040040void GrResourceAllocator::determineRecyclability() {
41 for (Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
42 if (cur->proxy()->canSkipResourceAllocator()) {
43 // These types of proxies can slip in here if they require a stencil buffer
44 continue;
45 }
46
Robert Phillipse5f73282019-06-18 17:15:04 -040047 if (cur->uses() >= cur->proxy()->priv().getProxyRefCnt()) {
Robert Phillipsc73666f2019-04-24 08:49:48 -040048 // All the refs on the proxy are known to the resource allocator thus no one
49 // should be holding onto it outside of Ganesh.
Robert Phillipse5f73282019-06-18 17:15:04 -040050 SkASSERT(cur->uses() == cur->proxy()->priv().getProxyRefCnt());
Robert Phillipsc73666f2019-04-24 08:49:48 -040051 cur->markAsRecyclable();
52 }
53 }
54}
55
Robert Phillipseafd48a2017-11-16 07:52:08 -050056void GrResourceAllocator::markEndOfOpList(int opListIndex) {
57 SkASSERT(!fAssigned); // We shouldn't be adding any opLists after (or during) assignment
58
59 SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
60 if (!fEndOfOpListOpIndices.empty()) {
61 SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
62 }
63
64 fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
Robert Phillipsc476e5d2019-03-26 14:50:08 -040065 SkASSERT(fEndOfOpListOpIndices.count() <= fNumOpLists);
Robert Phillipseafd48a2017-11-16 07:52:08 -050066}
67
GrResourceAllocator::~GrResourceAllocator() {
    // By destruction time assign() must have drained every interval: nothing may
    // remain pending, active, or hashed.
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
}
73
// Record that 'proxy' is used over the op range [start, end]. If the proxy already
// has an interval, that interval is extended; otherwise a new interval is created
// (reusing a node from fFreeIntervalList when one is available). 'actualUse'
// distinguishes genuine uses - which count toward recyclability - from
// bookkeeping-only registrations. Proxies that can skip the allocator and
// read-only proxies are special-cased and never receive an interval.
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
                                      ActualUse actualUse
                                      SkDEBUGCODE(, bool isDirectDstRead)) {

    if (proxy->canSkipResourceAllocator()) {
        // If the proxy is still not instantiated at this point but will need stencil, it will
        // attach its own stencil buffer upon onFlush instantiation.
        if (proxy->isInstantiated()) {
            int minStencilSampleCount = (proxy->asRenderTargetProxy())
                    ? proxy->asRenderTargetProxy()->numStencilSamples()
                    : 0;
            if (minStencilSampleCount) {
                if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                        fResourceProvider, proxy->peekSurface(), minStencilSampleCount)) {
                    // Best-effort: a missing stencil buffer degrades rendering but
                    // is not treated as a fatal allocation error here.
                    SkDebugf("WARNING: failed to attach stencil buffer. "
                             "Rendering may be incorrect.\n");
                }
            }
        }
        return;
    }

    SkASSERT(!proxy->priv().ignoredByResourceAllocator());

    SkASSERT(start <= end);
    SkASSERT(!fAssigned);  // We shouldn't be adding any intervals after (or during) assignment

    // If a proxy is read only it must refer to a texture with specific content that cannot be
    // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
    // with the same texture.
    if (proxy->readOnly()) {
        // Since we aren't going to add an interval we won't revisit this proxy in assign(). So it
        // must already be instantiated or it must be a lazy proxy that we will instantiate below.
        SkASSERT(proxy->isInstantiated() ||
                 GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState());
    } else {
        if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
            // Revise the interval for an existing use
#ifdef SK_DEBUG
            if (0 == start && 0 == end) {
                // This interval is for the initial upload to a deferred proxy. Due to the vagaries
                // of how deferred proxies are collected they can appear as uploads multiple times
                // in a single opLists' list and as uploads in several opLists.
                SkASSERT(0 == intvl->start());
            } else if (isDirectDstRead) {
                // Direct reads from the render target itself should occur w/in the existing
                // interval
                SkASSERT(intvl->start() <= start && intvl->end() >= end);
            } else {
                SkASSERT(intvl->end() <= start && intvl->end() <= end);
            }
#endif
            if (ActualUse::kYes == actualUse) {
                intvl->addUse();
            }
            intvl->extendEnd(end);
            return;
        }
        // No existing interval: allocate one, preferring the free list over the arena.
        Interval* newIntvl;
        if (fFreeIntervalList) {
            newIntvl = fFreeIntervalList;
            fFreeIntervalList = newIntvl->next();
            newIntvl->setNext(nullptr);
            newIntvl->resetTo(proxy, start, end);
        } else {
            newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
        }

        if (ActualUse::kYes == actualUse) {
            newIntvl->addUse();
        }
        fIntvlList.insertByIncreasingStart(newIntvl);
        fIntvlHash.add(newIntvl);
    }

    // Because readOnly proxies do not get a usage interval we must instantiate them here (since it
    // won't occur in GrResourceAllocator::assign)
    if (proxy->readOnly()) {
        // FIXME: remove this once we can do the lazy instantiation from assign instead.
        if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
            if (proxy->priv().doLazyInstantiation(fResourceProvider)) {
                if (proxy->priv().lazyInstantiationType() ==
                    GrSurfaceProxy::LazyInstantiationType::kDeinstantiate) {
                    fDeinstantiateTracker->addProxy(proxy);
                }
            } else {
                // Surface the failure later, from assign().
                fLazyInstantiationError = true;
            }
        }
    }
}
165
166GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400167 SkDEBUGCODE(this->validate());
168
Robert Phillips5af44de2017-07-18 14:49:38 -0400169 Interval* temp = fHead;
170 if (temp) {
Robert Phillipsf8e25022017-11-08 15:24:31 -0500171 fHead = temp->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400172 if (!fHead) {
173 fTail = nullptr;
174 }
175 temp->setNext(nullptr);
Robert Phillips5af44de2017-07-18 14:49:38 -0400176 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400177
178 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400179 return temp;
180}
181
182// TODO: fuse this with insertByIncreasingEnd
183void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400184 SkDEBUGCODE(this->validate());
185 SkASSERT(!intvl->next());
186
Robert Phillips5af44de2017-07-18 14:49:38 -0400187 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400188 // 14%
189 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500190 } else if (intvl->start() <= fHead->start()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400191 // 3%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500192 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400193 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400194 } else if (fTail->start() <= intvl->start()) {
195 // 83%
196 fTail->setNext(intvl);
197 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400198 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400199 // almost never
Robert Phillips5af44de2017-07-18 14:49:38 -0400200 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500201 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400202 for (; intvl->start() > next->start(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400203 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400204
205 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500206 intvl->setNext(next);
207 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400208 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400209
210 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400211}
212
213// TODO: fuse this with insertByIncreasingStart
214void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400215 SkDEBUGCODE(this->validate());
216 SkASSERT(!intvl->next());
217
Robert Phillips5af44de2017-07-18 14:49:38 -0400218 if (!fHead) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400219 // 14%
220 fHead = fTail = intvl;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500221 } else if (intvl->end() <= fHead->end()) {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400222 // 64%
Robert Phillipsf8e25022017-11-08 15:24:31 -0500223 intvl->setNext(fHead);
Robert Phillips5af44de2017-07-18 14:49:38 -0400224 fHead = intvl;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400225 } else if (fTail->end() <= intvl->end()) {
226 // 3%
227 fTail->setNext(intvl);
228 fTail = intvl;
Robert Phillips5af44de2017-07-18 14:49:38 -0400229 } else {
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400230 // 19% but 81% of those land right after the list's head
Robert Phillips5af44de2017-07-18 14:49:38 -0400231 Interval* prev = fHead;
Robert Phillipsf8e25022017-11-08 15:24:31 -0500232 Interval* next = prev->next();
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400233 for (; intvl->end() > next->end(); prev = next, next = next->next()) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400234 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400235
236 SkASSERT(next);
Robert Phillipsf8e25022017-11-08 15:24:31 -0500237 intvl->setNext(next);
238 prev->setNext(intvl);
Robert Phillips5af44de2017-07-18 14:49:38 -0400239 }
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400240
241 SkDEBUGCODE(this->validate());
Robert Phillips5af44de2017-07-18 14:49:38 -0400242}
243
#ifdef SK_DEBUG
// Debug-only consistency check: fHead and fTail are either both null or both
// non-null, and fTail is the last node actually reachable from fHead.
void GrResourceAllocator::IntervalList::validate() const {
    SkASSERT(SkToBool(fHead) == SkToBool(fTail));

    // Walk to the end of the list; 'prev' ends up as the last node (or nullptr).
    Interval* prev = nullptr;
    for (Interval* cur = fHead; cur; prev = cur, cur = cur->next()) {
    }

    SkASSERT(fTail == prev);
}
#endif
Robert Phillips4150eea2018-02-07 17:08:21 -0500255
256 GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::detachAll() {
257 Interval* tmp = fHead;
258 fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400259 fTail = nullptr;
Robert Phillips4150eea2018-02-07 17:08:21 -0500260 return tmp;
261}
262
Robert Phillips5af44de2017-07-18 14:49:38 -0400263// 'surface' can be reused. Add it back to the free pool.
Robert Phillips715d08c2018-07-18 13:56:48 -0400264void GrResourceAllocator::recycleSurface(sk_sp<GrSurface> surface) {
Robert Phillips57aa3672017-07-21 11:38:13 -0400265 const GrScratchKey &key = surface->resourcePriv().getScratchKey();
266
267 if (!key.isValid()) {
268 return; // can't do it w/o a valid scratch key
269 }
270
Robert Phillipsf8e25022017-11-08 15:24:31 -0500271 if (surface->getUniqueKey().isValid()) {
272 // If the surface has a unique key we throw it back into the resource cache.
273 // If things get really tight 'findSurfaceFor' may pull it back out but there is
274 // no need to have it in tight rotation.
275 return;
276 }
277
Robert Phillips715d08c2018-07-18 13:56:48 -0400278#if GR_ALLOCATION_SPEW
279 SkDebugf("putting surface %d back into pool\n", surface->uniqueID().asUInt());
280#endif
Robert Phillips57aa3672017-07-21 11:38:13 -0400281 // TODO: fix this insertion so we get a more LRU-ish behavior
Robert Phillips5b65a842017-11-13 15:48:12 -0500282 fFreePool.insert(key, surface.release());
Robert Phillips5af44de2017-07-18 14:49:38 -0400283}
284
// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a useable one, create a new one.
//
// Lookup order: (1) the resource cache, if the proxy carries a unique key;
// (2) the allocator's free pool, keyed by the proxy's scratch key; (3) a freshly
// created surface. Returns nullptr if a candidate is found but a required stencil
// buffer cannot be attached, or if creation fails.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     int minStencilSampleCount) {

    if (proxy->asTextureProxy() && proxy->asTextureProxy()->getUniqueKey().isValid()) {
        // First try to reattach to a cached version if the proxy is uniquely keyed
        sk_sp<GrSurface> surface = fResourceProvider->findByUniqueKey<GrSurface>(
                proxy->asTextureProxy()->getUniqueKey());
        if (surface) {
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                           minStencilSampleCount)) {
                return nullptr;
            }

            return surface;
        }
    }

    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    // Any pooled surface with a matching scratch key is acceptable.
    auto filter = [] (const GrSurface* s) {
        return true;
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            GrBudgetedType::kBudgeted != surface->resourcePriv().budgetedType()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(),
                                                       minStencilSampleCount)) {
            return nullptr;
        }
        // Uniquely-keyed surfaces never enter the free pool (see recycleSurface).
        SkASSERT(!surface->getUniqueKey().isValid());
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}
332
// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool if possible.
void GrResourceAllocator::expire(unsigned int curIndex) {
    // fActiveIntvls is sorted by increasing end(), so finished intervals are
    // always at the head.
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();
        SkASSERT(!temp->next());

        if (temp->wasAssignedSurface()) {
            sk_sp<GrSurface> surface = temp->detachSurface();

            // Only recyclable intervals hand their surface back to the pool;
            // otherwise the last ref is simply dropped here.
            if (temp->isRecyclable()) {
                this->recycleSurface(std::move(surface));
            }
        }

        // Add temp to the free interval list so it can be reused
        SkASSERT(!temp->wasAssignedSurface()); // it had better not have a ref on a surface
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}
354
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400355bool GrResourceAllocator::onOpListBoundary() const {
356 if (fIntvlList.empty()) {
357 SkASSERT(fCurOpListIndex+1 <= fNumOpLists);
358 // Although technically on an opList boundary there is no need to force an
359 // intermediate flush here
360 return false;
361 }
362
363 const Interval* tmp = fIntvlList.peekHead();
364 return fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start();
365}
366
// Cut the current assignment short at the next opList boundary (recorded into
// '*stopIndex') so the caller can flush before continuing.
void GrResourceAllocator::forceIntermediateFlush(int* stopIndex) {
    *stopIndex = fCurOpListIndex+1;

    // This is interrupting the allocation of resources for this flush. We need to
    // proactively clear the active interval list of any intervals that aren't
    // guaranteed to survive the partial flush lest they become zombies (i.e.,
    // holding a deleted surface proxy).
    const Interval* tmp = fIntvlList.peekHead();
    SkASSERT(fEndOfOpListOpIndices[fCurOpListIndex] <= tmp->start());

    fCurOpListIndex++;
    SkASSERT(fCurOpListIndex < fNumOpLists);

    // Expire everything that ends before the next pending interval's start.
    this->expire(tmp->start());
}
382
// Assign GrSurfaces to all pending intervals for the opLists in
// [*startIndex, *stopIndex). Returns false if there is nothing to render.
// '*outError' reports any proxy-instantiation failure (including deferred
// lazy-instantiation errors recorded by addInterval). May stop early at an
// opList boundary when the resource provider is over budget, in which case
// '*stopIndex' is lowered accordingly.
bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = fLazyInstantiationError ? AssignError::kFailedProxyInstantiation
                                        : AssignError::kNoError;

    SkASSERT(fNumOpLists == fEndOfOpListOpIndices.count());

    fIntvlHash.reset(); // we don't need the interval hash anymore

    if (fCurOpListIndex >= fEndOfOpListOpIndices.count()) {
        return false; // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    if (fIntvlList.empty()) {
        fCurOpListIndex = fEndOfOpListOpIndices.count();
        return true; // no resources to assign
    }

#if GR_ALLOCATION_SPEW
    SkDebugf("assigning opLists %d through %d out of %d numOpLists\n",
             *startIndex, *stopIndex, fNumOpLists);
    SkDebugf("EndOfOpListIndices: ");
    for (int i = 0; i < fEndOfOpListOpIndices.count(); ++i) {
        SkDebugf("%d ", fEndOfOpListOpIndices[i]);
    }
    SkDebugf("\n");
#endif

    SkDEBUGCODE(fAssigned = true;)

#if GR_ALLOCATION_SPEW
    this->dumpIntervals();
#endif
    // Intervals are popped in increasing-start order; expired intervals release
    // their surfaces back to the free pool before each new assignment.
    while (Interval* cur = fIntvlList.popHead()) {
        // Advance the opList cursor when this interval starts a new opList.
        if (fEndOfOpListOpIndices[fCurOpListIndex] <= cur->start()) {
            fCurOpListIndex++;
            SkASSERT(fCurOpListIndex < fNumOpLists);
        }

        this->expire(cur->start());

        int minStencilSampleCount = (cur->proxy()->asRenderTargetProxy())
                ? cur->proxy()->asRenderTargetProxy()->numStencilSamples()
                : 0;

        if (cur->proxy()->isInstantiated()) {
            // Already instantiated: just make sure the stencil buffer is attached.
            if (!GrSurfaceProxyPriv::AttachStencilIfNeeded(
                    fResourceProvider, cur->proxy()->peekSurface(), minStencilSampleCount)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate draws on opList boundaries
                if (this->onOpListBoundary()) {
                    this->forceIntermediateFlush(stopIndex);
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            // Lazy proxies instantiate themselves; track those that deinstantiate.
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            } else {
                if (GrSurfaceProxy::LazyInstantiationType::kDeinstantiate ==
                    cur->proxy()->priv().lazyInstantiationType()) {
                    fDeinstantiateTracker->addProxy(cur->proxy());
                }
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(
                cur->proxy(), minStencilSampleCount)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* texProxy = cur->proxy()->asTextureProxy();

            if (texProxy && texProxy->getUniqueKey().isValid()) {
                if (!surface->getUniqueKey().isValid()) {
                    fResourceProvider->assignUniqueKeyToResource(texProxy->getUniqueKey(),
                                                                 surface.get());
                }
                SkASSERT(surface->getUniqueKey() == texProxy->getUniqueKey());
            }

#if GR_ALLOCATION_SPEW
            SkDebugf("Assigning %d to %d\n",
                     surface->uniqueID().asUInt(),
                     cur->proxy()->uniqueID().asUInt());
#endif

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        // Whether or not assignment succeeded, the interval becomes active so
        // expire() will eventually recycle its node (and surface, if any).
        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate draws on opList boundaries
            if (this->onOpListBoundary()) {
                this->forceIntermediateFlush(stopIndex);
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}
Robert Phillips715d08c2018-07-18 13:56:48 -0400499
#if GR_ALLOCATION_SPEW
// Debug aid: dump every pending interval (IDs, op range, ref counts) followed by
// an ASCII chart of interval lifetimes across the op index range.
void GrResourceAllocator::dumpIntervals() {
    // Print all the intervals while computing their range
    SkDebugf("------------------------------------------------------------\n");
    unsigned int min = std::numeric_limits<unsigned int>::max();
    unsigned int max = 0;
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: [%2d, %2d] - proxyRefs:%d surfaceRefs:%d\n",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
                 cur->start(),
                 cur->end(),
                 cur->proxy()->priv().getProxyRefCnt(),
                 cur->proxy()->testingOnly_getBackingRefCnt());
        min = SkTMin(min, cur->start());
        max = SkTMax(max, cur->end());
    }

    // Draw a graph of the usage intervals
    for(const Interval* cur = fIntvlList.peekHead(); cur; cur = cur->next()) {
        SkDebugf("{ %3d,%3d }: ",
                 cur->proxy()->uniqueID().asUInt(),
                 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);
        for (unsigned int i = min; i <= max; ++i) {
            if (i >= cur->start() && i <= cur->end()) {
                SkDebugf("x");
            } else {
                SkDebugf(" ");
            }
        }
        SkDebugf("\n");
    }
}
#endif