/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrResourceAllocator_DEFINED
#define GrResourceAllocator_DEFINED

#include "include/private/SkTHash.h"

#include "src/gpu/GrHashMapWithCache.h"
#include "src/gpu/GrSurface.h"
#include "src/gpu/GrSurfaceProxy.h"

#include "src/core/SkArenaAlloc.h"
#include "src/core/SkTMultiMap.h"

class GrDirectContext;

// Print out explicit allocation information
#define GR_ALLOCATION_SPEW 0

// Print out information about interval creation
#define GR_TRACK_INTERVAL_CREATION 0

/*
 * The ResourceAllocator explicitly distributes GPU resources at flush time. It operates by
 * being given the usage intervals of the various proxies. It keeps these intervals in a singly
 * linked list sorted by increasing start index. (It also maintains a hash table from proxyID
 * to interval so that a reused proxy's existing interval can be found.) When it comes time to
 * allocate the resources it traverses the sorted list and:
 *     removes intervals from the active list that have completed (returning their GrSurfaces
 *     to the free pool)
 *     allocates a new resource (preferably from the free pool) for the new interval
 *     adds the new interval to the active list (that is sorted by increasing end index)
 * A rough sketch of this scan appears immediately below this comment.
 *
 * Note: the op indices (used in the usage intervals) come from the order of the ops in
 * their opsTasks after the opsTask DAG has been linearized.
 *
 *************************************************************************************************
 * How does instantiation failure handling work when explicitly allocating?
 *
 * In the gather-usage-intervals pass, all the GrSurfaceProxies used in the flush should be
 * gathered (i.e., in GrOpsTask::gatherProxyIntervals).
 *
 * The allocator will churn through this list but could fail anywhere.
 *
 * Allocation failure handling occurs at two levels:
 *
 * 1) If the GrSurface backing an opsTask fails to allocate then the entire opsTask is dropped.
 *
 * 2) If an individual GrSurfaceProxy fails to allocate then any ops that use it are dropped
 *    (via GrOpsTask::purgeOpsWithUninstantiatedProxies).
 *
 * The pass to determine which ops to drop is a bit laborious, so we only check the opsTasks and
 * individual ops when something goes wrong in allocation (i.e., when the return code from
 * GrResourceAllocator::assign is bad).
 *
 * Altogether, this means we should never attempt to draw an op which is missing some
 * required GrSurface.
 *
 * One wrinkle in this plan is that promise images are fulfilled during the gather-interval pass.
 * If any of the promise images fail at this stage then the allocator is set into an error
 * state and all allocations are then scanned for failures during the main allocation pass.
 */
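//
// For illustration, the scan described at the top of the comment above looks roughly like the
// following sketch. This is not the actual implementation; 'sortedIntvls' and 'activeIntvls'
// stand in for the fIntvlList and fActiveIntvls members declared below.
//
//     while (Interval* cur = sortedIntvls.popHead()) {
//         expire(cur->start());      // retire intervals that end before 'cur' starts, returning
//                                    // their registers to the free pool
//         Register* r = findOrCreateRegisterFor(cur->proxy());   // prefers the free pool
//         cur->setRegister(r);
//         activeIntvls.insertByIncreasingEnd(cur);
//     }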
class GrResourceAllocator {
public:
    GrResourceAllocator(GrDirectContext* dContext)
            : fDContext(dContext) {}

    ~GrResourceAllocator();

    unsigned int curOp() const { return fNumOps; }
    void incOps() { fNumOps++; }

    /** Indicates whether a given call to addInterval represents an actual usage of the
     *  provided proxy. This is mainly here to accommodate deferred proxies attached to opsTasks.
     *  In that case we need to create an extra-long interval for them (due to the upload) but
     *  don't want to count that usage/reference towards the proxy's recyclability.
     */
    enum class ActualUse : bool {
        kNo  = false,
        kYes = true
    };

    // Add a usage interval from 'start' to 'end' inclusive. This is usually used for
    // renderTargets. If an interval already exists for the proxy it will be expanded to include
    // the new range.
    void addInterval(GrSurfaceProxy*, unsigned int start, unsigned int end, ActualUse actualUse
                     SkDEBUGCODE(, bool isDirectDstRead = false));
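    //
    // A hypothetical call from a gather pass, for illustration only (the proxy and indices are
    // invented; real callers live in the gather passes, e.g. GrOpsTask::gatherProxyIntervals):
    //
    //     alloc->addInterval(proxy, alloc->curOp(), alloc->curOp(), ActualUse::kYes);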

    bool failedInstantiation() const { return fFailedInstantiation; }

    // Generate an internal plan for resource allocation. After this you can optionally call
    // `makeBudgetHeadroom` to check whether that plan would go over our memory budget.
    // Fully-lazy proxies are also instantiated at this point so that their size can
    // be known accurately. Returns false if any lazy proxy failed to instantiate, true otherwise.
    bool planAssignment();

    // Figure out how much VRAM headroom this plan requires. If there are enough purgeable
    // resources, purge them and return true. Otherwise return false.
    bool makeBudgetHeadroom();

    // Clear all internal state in preparation for a new set of intervals.
    void reset();

    // Instantiate and assign resources to all proxies.
    bool assign();
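    //
    // A rough sketch of the expected calling sequence, for illustration only (the real driver is
    // the flush code, which also drops the opsTasks/ops affected by any failure):
    //
    //     GrResourceAllocator alloc(dContext);
    //     // ... addInterval() for every proxy used in the flush ...
    //     if (alloc.planAssignment() && alloc.makeBudgetHeadroom()) {  // headroom check is optional
    //         bool ok = alloc.assign();
    //         // if assign() fails, failedInstantiation() also returns true and the caller drops
    //         // the affected work (see the failure-handling comment at the top of this file)
    //     }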

#if GR_ALLOCATION_SPEW
    void dumpIntervals();
#endif

private:
    class Interval;
    class Register;

    // Remove dead intervals from the active list
    void expire(unsigned int curIndex);

    // These two methods wrap the interactions with the free pool
    void recycleRegister(Register* r);
    Register* findOrCreateRegisterFor(GrSurfaceProxy* proxy);

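    // Expired registers are stored in the free pool keyed by their scratch key, so a later
    // interval whose proxy has a compatible key can reuse the same register (and thus the same
    // backing GrSurface) rather than forcing a new allocation.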
    struct FreePoolTraits {
        static const GrScratchKey& GetKey(const Register& r) {
            return r.scratchKey();
        }

        static uint32_t Hash(const GrScratchKey& key) { return key.hash(); }
        static void OnFree(Register* r) { }
    };
    typedef SkTMultiMap<Register, GrScratchKey, FreePoolTraits> FreePoolMultiMap;

    typedef SkTHashMap<uint32_t, Interval*, GrCheapHash> IntvlHash;

    struct UniqueKeyHash {
        uint32_t operator()(const GrUniqueKey& key) const { return key.hash(); }
    };
    typedef SkTHashMap<GrUniqueKey, Register*, UniqueKeyHash> UniqueKeyRegisterHash;

    // Each proxy – with some exceptions – is assigned a register. After all assignments are made,
    // another pass is performed to instantiate and assign actual surfaces to the proxies. These
    // two passes are exposed as planAssignment() and assign(); between them the caller can query
    // the memory cost of the plan (via makeBudgetHeadroom()) before committing to surface
    // creation.
    class Register {
    public:
        // It's OK to pass an invalid scratch key iff the proxy has a unique key.
        Register(GrSurfaceProxy* originatingProxy, GrScratchKey, GrResourceProvider*);

        const GrScratchKey& scratchKey() const { return fScratchKey; }
        const GrUniqueKey& uniqueKey() const { return fOriginatingProxy->getUniqueKey(); }

        bool accountedForInBudget() const { return fAccountedForInBudget; }
        void setAccountedForInBudget() { fAccountedForInBudget = true; }

        GrSurface* existingSurface() const { return fExistingSurface.get(); }

        // Can this register be used by other proxies after this one?
        bool isRecyclable(const GrCaps&, GrSurfaceProxy* proxy, int knownUseCount) const;

        // Resolve the register allocation to an actual GrSurface. 'fOriginatingProxy'
        // is used to cache the allocation when a given register is used by multiple
        // proxies.
        bool instantiateSurface(GrSurfaceProxy*, GrResourceProvider*);

        SkDEBUGCODE(uint32_t uniqueID() const { return fUniqueID; })

    private:
        GrSurfaceProxy*  fOriginatingProxy;
        GrScratchKey     fScratchKey;      // free pool wants a reference to this.
        sk_sp<GrSurface> fExistingSurface; // queried from resource cache. may be null.
        bool             fAccountedForInBudget = false;

#ifdef SK_DEBUG
        uint32_t         fUniqueID;

        static uint32_t  CreateUniqueID();
#endif
    };

    class Interval {
    public:
        Interval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end)
                : fProxy(proxy)
                , fStart(start)
                , fEnd(end) {
            SkASSERT(proxy);
            SkDEBUGCODE(fUniqueID = CreateUniqueID());
#if GR_TRACK_INTERVAL_CREATION
            SkString proxyStr = proxy->dump();
            SkDebugf("New intvl %d: %s [%d, %d]\n", fUniqueID, proxyStr.c_str(), start, end);
#endif
        }

        const GrSurfaceProxy* proxy() const { return fProxy; }
        GrSurfaceProxy* proxy() { return fProxy; }

        unsigned int start() const { return fStart; }
        unsigned int end() const { return fEnd; }

        void setNext(Interval* next) { fNext = next; }
        const Interval* next() const { return fNext; }
        Interval* next() { return fNext; }

        Register* getRegister() const { return fRegister; }
        void setRegister(Register* r) { fRegister = r; }

        void addUse() { fUses++; }
        int uses() const { return fUses; }

        void extendEnd(unsigned int newEnd) {
            if (newEnd > fEnd) {
#if GR_TRACK_INTERVAL_CREATION
                // Log before updating fEnd so both the old and new end indices are reported.
                SkDebugf("intvl %d: extending from %d to %d\n", fUniqueID, fEnd, newEnd);
#endif
                fEnd = newEnd;
            }
        }

        SkDEBUGCODE(uint32_t uniqueID() const { return fUniqueID; })

    private:
        GrSurfaceProxy*  fProxy;
        unsigned int     fStart;
        unsigned int     fEnd;
        Interval*        fNext = nullptr;
        unsigned int     fUses = 0;
        Register*        fRegister = nullptr;

#ifdef SK_DEBUG
        uint32_t         fUniqueID;

        static uint32_t  CreateUniqueID();
#endif
    };

    class IntervalList {
    public:
        IntervalList() = default;
        // N.B. No need for a destructor – the arena allocator will clean up for us.

        bool empty() const {
            SkASSERT(SkToBool(fHead) == SkToBool(fTail));
            return !SkToBool(fHead);
        }
        const Interval* peekHead() const { return fHead; }
        Interval* peekHead() { return fHead; }
        Interval* popHead();
        void insertByIncreasingStart(Interval*);
        void insertByIncreasingEnd(Interval*);

    private:
        SkDEBUGCODE(void validate() const;)

        Interval* fHead = nullptr;
        Interval* fTail = nullptr;
    };

    // Compositing use cases can create > 80 intervals.
    static const int kInitialArenaSize = 128 * sizeof(Interval);

    GrDirectContext*      fDContext;
    FreePoolMultiMap      fFreePool;          // Recently created/used GrSurfaces
    IntvlHash             fIntvlHash;         // All the intervals, hashed by proxyID

    IntervalList          fIntvlList;         // All the intervals sorted by increasing start
    IntervalList          fActiveIntvls;      // List of live intervals during assignment
                                              // (sorted by increasing end)
    IntervalList          fFinishedIntvls;    // All the completed intervals
                                              // (sorted by increasing start)
    UniqueKeyRegisterHash fUniqueKeyRegisters;
    unsigned int          fNumOps = 0;

    SkDEBUGCODE(bool      fPlanned  = false;)
    SkDEBUGCODE(bool      fAssigned = false;)

    SkSTArenaAllocWithReset<kInitialArenaSize> fInternalAllocator; // intervals & registers
    bool                                       fFailedInstantiation = false;
};

#endif // GrResourceAllocator_DEFINED