blob: fc0626472b2e56ad3ee764afc14f6ae98ce498f5 [file] [log] [blame]
Robert Phillips5af44de2017-07-18 14:49:38 -04001/*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#ifndef GrResourceAllocator_DEFINED
9#define GrResourceAllocator_DEFINED
10
Adlai Holler539db2f2021-03-16 09:45:05 -040011#include "include/private/SkTHash.h"
12
Adlai Holler539db2f2021-03-16 09:45:05 -040013#include "src/gpu/GrHashMapWithCache.h"
Greg Daniel456f9b52020-03-05 19:14:18 +000014#include "src/gpu/GrSurface.h"
Adlai Hollerca1137b2021-04-08 11:39:55 -040015#include "src/gpu/GrSurfaceProxy.h"
Robert Phillips8186cbe2017-11-01 17:32:39 -040016
Ben Wagner729a23f2019-05-17 16:29:34 -040017#include "src/core/SkArenaAlloc.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050018#include "src/core/SkTMultiMap.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040019
Adlai Hollerca1137b2021-04-08 11:39:55 -040020class GrDirectContext;
Robert Phillips5af44de2017-07-18 14:49:38 -040021
Robert Phillips715d08c2018-07-18 13:56:48 -040022// Print out explicit allocation information
23#define GR_ALLOCATION_SPEW 0
24
Robert Phillipsda1be462018-07-27 07:18:06 -040025// Print out information about interval creation
26#define GR_TRACK_INTERVAL_CREATION 0
27
Robert Phillips5af44de2017-07-18 14:49:38 -040028/*
29 * The ResourceAllocator explicitly distributes GPU resources at flush time. It operates by
30 * being given the usage intervals of the various proxies. It keeps these intervals in a singly
31 * linked list sorted by increasing start index. (It also maintains a hash table from proxyID
32 * to interval to find proxy reuse). When it comes time to allocate the resources it
33 * traverses the sorted list and:
34 * removes intervals from the active list that have completed (returning their GrSurfaces
35 * to the free pool)
 *
37 * allocates a new resource (preferably from the free pool) for the new interval
38 * adds the new interval to the active list (that is sorted by increasing end index)
39 *
40 * Note: the op indices (used in the usage intervals) come from the order of the ops in
Greg Danielf41b2bd2019-08-22 16:19:24 -040041 * their opsTasks after the opsTask DAG has been linearized.
Robert Phillips82774f82019-06-20 14:38:27 -040042 *
43 *************************************************************************************************
44 * How does instantiation failure handling work when explicitly allocating?
45 *
46 * In the gather usage intervals pass all the GrSurfaceProxies used in the flush should be
Greg Danielf41b2bd2019-08-22 16:19:24 -040047 * gathered (i.e., in GrOpsTask::gatherProxyIntervals).
Robert Phillips82774f82019-06-20 14:38:27 -040048 *
49 * The allocator will churn through this list but could fail anywhere.
50 *
51 * Allocation failure handling occurs at two levels:
52 *
Greg Danielf41b2bd2019-08-22 16:19:24 -040053 * 1) If the GrSurface backing an opsTask fails to allocate then the entire opsTask is dropped.
Robert Phillips82774f82019-06-20 14:38:27 -040054 *
55 * 2) If an individual GrSurfaceProxy fails to allocate then any ops that use it are dropped
Greg Danielf41b2bd2019-08-22 16:19:24 -040056 * (via GrOpsTask::purgeOpsWithUninstantiatedProxies)
Robert Phillips82774f82019-06-20 14:38:27 -040057 *
Greg Danielf41b2bd2019-08-22 16:19:24 -040058 * The pass to determine which ops to drop is a bit laborious so we only check the opsTasks and
Robert Phillips82774f82019-06-20 14:38:27 -040059 * individual ops when something goes wrong in allocation (i.e., when the return code from
60 * GrResourceAllocator::assign is bad)
61 *
62 * All together this means we should never attempt to draw an op which is missing some
63 * required GrSurface.
64 *
65 * One wrinkle in this plan is that promise images are fulfilled during the gather interval pass.
66 * If any of the promise images fail at this stage then the allocator is set into an error
67 * state and all allocations are then scanned for failures during the main allocation pass.
Robert Phillips5af44de2017-07-18 14:49:38 -040068 */
69class GrResourceAllocator {
70public:
Adlai Hollerca1137b2021-04-08 11:39:55 -040071 GrResourceAllocator(GrDirectContext* dContext SkDEBUGCODE(, int numOpsTasks))
72 : fDContext(dContext) {}
Robert Phillips5af44de2017-07-18 14:49:38 -040073
Robert Phillips5b65a842017-11-13 15:48:12 -050074 ~GrResourceAllocator();
75
Robert Phillips5af44de2017-07-18 14:49:38 -040076 unsigned int curOp() const { return fNumOps; }
77 void incOps() { fNumOps++; }
Robert Phillips5af44de2017-07-18 14:49:38 -040078
Adlai Holler7f7a5df2021-02-09 17:41:10 +000079 /** Indicates whether a given call to addInterval represents an actual usage of the
80 * provided proxy. This is mainly here to accommodate deferred proxies attached to opsTasks.
81 * In that case we need to create an extra long interval for them (due to the upload) but
82 * don't want to count that usage/reference towards the proxy's recyclability.
83 */
84 enum class ActualUse : bool {
85 kNo = false,
86 kYes = true
87 };
88
Robert Phillipseafd48a2017-11-16 07:52:08 -050089 // Add a usage interval from 'start' to 'end' inclusive. This is usually used for renderTargets.
Robert Phillips5af44de2017-07-18 14:49:38 -040090 // If an existing interval already exists it will be expanded to include the new range.
Adlai Holler7f7a5df2021-02-09 17:41:10 +000091 void addInterval(GrSurfaceProxy*, unsigned int start, unsigned int end, ActualUse actualUse
Chris Dalton8816b932017-11-29 16:48:25 -070092 SkDEBUGCODE(, bool isDirectDstRead = false));
Robert Phillips5af44de2017-07-18 14:49:38 -040093
Adlai Holler19fd5142021-03-08 10:19:30 -070094 // Assign resources to all proxies. Returns whether the assignment was successful.
95 bool assign();
Robert Phillips5af44de2017-07-18 14:49:38 -040096
Robert Phillips715d08c2018-07-18 13:56:48 -040097#if GR_ALLOCATION_SPEW
98 void dumpIntervals();
99#endif
100
Robert Phillips5af44de2017-07-18 14:49:38 -0400101private:
102 class Interval;
Adlai Holler4cfbe532021-03-17 10:36:39 -0400103 class Register;
Robert Phillips5af44de2017-07-18 14:49:38 -0400104
105 // Remove dead intervals from the active list
106 void expire(unsigned int curIndex);
107
108 // These two methods wrap the interactions with the free pool
Adlai Holler4cfbe532021-03-17 10:36:39 -0400109 void recycleRegister(Register* r);
Adlai Holler7df8d222021-03-19 12:27:49 -0400110 Register* findOrCreateRegisterFor(GrSurfaceProxy* proxy);
Robert Phillips5af44de2017-07-18 14:49:38 -0400111
Robert Phillips57aa3672017-07-21 11:38:13 -0400112 struct FreePoolTraits {
Adlai Holler4cfbe532021-03-17 10:36:39 -0400113 static const GrScratchKey& GetKey(const Register& r) {
114 return r.scratchKey();
Robert Phillips57aa3672017-07-21 11:38:13 -0400115 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400116
Robert Phillips57aa3672017-07-21 11:38:13 -0400117 static uint32_t Hash(const GrScratchKey& key) { return key.hash(); }
Adlai Holler4cfbe532021-03-17 10:36:39 -0400118 static void OnFree(Register* r) { }
Robert Phillips5af44de2017-07-18 14:49:38 -0400119 };
Adlai Holler4cfbe532021-03-17 10:36:39 -0400120 typedef SkTMultiMap<Register, GrScratchKey, FreePoolTraits> FreePoolMultiMap;
Robert Phillips57aa3672017-07-21 11:38:13 -0400121
Adlai Holler7df8d222021-03-19 12:27:49 -0400122 typedef SkTHashMap<uint32_t, Interval*, GrCheapHash> IntvlHash;
Robert Phillips5af44de2017-07-18 14:49:38 -0400123
Adlai Holler7df8d222021-03-19 12:27:49 -0400124 struct UniqueKeyHash {
125 uint32_t operator()(const GrUniqueKey& key) const { return key.hash(); }
126 };
127 typedef SkTHashMap<GrUniqueKey, Register*, UniqueKeyHash> UniqueKeyRegisterHash;
128
129 // Each proxy – with some exceptions – is assigned a register. After all assignments are made,
130 // another pass is performed to instantiate and assign actual surfaces to the proxies. Right
131 // now these are performed in one call, but in the future they will be separable and the user
132 // will be able to query re: memory cost before committing to surface creation.
Adlai Holler4cfbe532021-03-17 10:36:39 -0400133 class Register {
134 public:
Adlai Holler7df8d222021-03-19 12:27:49 -0400135 // It's OK to pass an invalid scratch key iff the proxy has a unique key.
Adlai Holler3cffe812021-04-09 13:43:32 -0400136 Register(GrSurfaceProxy* originatingProxy, GrScratchKey);
Adlai Holler4cfbe532021-03-17 10:36:39 -0400137
Adlai Holler7df8d222021-03-19 12:27:49 -0400138 const GrScratchKey& scratchKey() const { return fScratchKey; }
139 const GrUniqueKey& uniqueKey() const { return fOriginatingProxy->getUniqueKey(); }
Adlai Holler4cfbe532021-03-17 10:36:39 -0400140
Adlai Holler7df8d222021-03-19 12:27:49 -0400141 // Can this register be used by other proxies after this one?
142 bool isRecyclable(const GrCaps&, GrSurfaceProxy* proxy, int knownUseCount) const;
143
144 // Resolve the register allocation to an actual GrSurface. 'fOriginatingProxy'
145 // is used to cache the allocation when a given register is used by multiple
146 // proxies.
147 bool instantiateSurface(GrSurfaceProxy*, GrResourceProvider*);
Adlai Holler4cfbe532021-03-17 10:36:39 -0400148
149 SkDEBUGCODE(uint32_t uniqueID() const { return fUniqueID; })
150
151 private:
Adlai Holler7df8d222021-03-19 12:27:49 -0400152 GrSurfaceProxy* fOriginatingProxy;
153 GrScratchKey fScratchKey; // free pool wants a reference to this.
Adlai Holler4cfbe532021-03-17 10:36:39 -0400154
155#ifdef SK_DEBUG
156 uint32_t fUniqueID;
157
158 static uint32_t CreateUniqueID();
159#endif
160 };
161
Robert Phillips5af44de2017-07-18 14:49:38 -0400162 class Interval {
163 public:
164 Interval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end)
Adlai Holler7df8d222021-03-19 12:27:49 -0400165 : fProxy(proxy)
166 , fStart(start)
167 , fEnd(end) {
Robert Phillips5af44de2017-07-18 14:49:38 -0400168 SkASSERT(proxy);
Adlai Holler4cfbe532021-03-17 10:36:39 -0400169 SkDEBUGCODE(fUniqueID = CreateUniqueID());
Robert Phillipsda1be462018-07-27 07:18:06 -0400170#if GR_TRACK_INTERVAL_CREATION
Robert Phillips047d5bb2021-01-08 13:39:19 -0500171 SkString proxyStr = proxy->dump();
172 SkDebugf("New intvl %d: %s [%d, %d]\n", fUniqueID, proxyStr.c_str(), start, end);
Robert Phillipsda1be462018-07-27 07:18:06 -0400173#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400174 }
175
Robert Phillipsf8e25022017-11-08 15:24:31 -0500176 const GrSurfaceProxy* proxy() const { return fProxy; }
177 GrSurfaceProxy* proxy() { return fProxy; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400178
Robert Phillipsf8e25022017-11-08 15:24:31 -0500179 unsigned int start() const { return fStart; }
180 unsigned int end() const { return fEnd; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400181
182 void setNext(Interval* next) { fNext = next; }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500183 const Interval* next() const { return fNext; }
184 Interval* next() { return fNext; }
185
Adlai Holler4cfbe532021-03-17 10:36:39 -0400186 Register* getRegister() const { return fRegister; }
187 void setRegister(Register* r) { fRegister = r; }
188
Robert Phillipsc73666f2019-04-24 08:49:48 -0400189 void addUse() { fUses++; }
Adlai Holler1143b1b2021-03-16 13:07:40 -0400190 int uses() const { return fUses; }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500191
192 void extendEnd(unsigned int newEnd) {
Chris Dalton8816b932017-11-29 16:48:25 -0700193 if (newEnd > fEnd) {
194 fEnd = newEnd;
Robert Phillipsda1be462018-07-27 07:18:06 -0400195#if GR_TRACK_INTERVAL_CREATION
196 SkDebugf("intvl %d: extending from %d to %d\n", fUniqueID, fEnd, newEnd);
197#endif
Chris Dalton8816b932017-11-29 16:48:25 -0700198 }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500199 }
200
Adlai Holler4cfbe532021-03-17 10:36:39 -0400201 SkDEBUGCODE(uint32_t uniqueID() const { return fUniqueID; })
202
Robert Phillipsf8e25022017-11-08 15:24:31 -0500203 private:
Robert Phillips5b65a842017-11-13 15:48:12 -0500204 GrSurfaceProxy* fProxy;
Robert Phillips5b65a842017-11-13 15:48:12 -0500205 unsigned int fStart;
206 unsigned int fEnd;
Adlai Holler1143b1b2021-03-16 13:07:40 -0400207 Interval* fNext = nullptr;
Robert Phillipsc73666f2019-04-24 08:49:48 -0400208 unsigned int fUses = 0;
Adlai Holler4cfbe532021-03-17 10:36:39 -0400209 Register* fRegister = nullptr;
Robert Phillipsda1be462018-07-27 07:18:06 -0400210
Adlai Holler4cfbe532021-03-17 10:36:39 -0400211#ifdef SK_DEBUG
Robert Phillipsda1be462018-07-27 07:18:06 -0400212 uint32_t fUniqueID;
213
Adlai Hollerda163672021-03-15 11:03:37 -0400214 static uint32_t CreateUniqueID();
Robert Phillipsda1be462018-07-27 07:18:06 -0400215#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400216 };
217
218 class IntervalList {
219 public:
220 IntervalList() = default;
221 ~IntervalList() {
Robert Phillips8186cbe2017-11-01 17:32:39 -0400222 // The only time we delete an IntervalList is in the GrResourceAllocator dtor.
223 // Since the arena allocator will clean up for us we don't bother here.
Robert Phillips5af44de2017-07-18 14:49:38 -0400224 }
225
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400226 bool empty() const {
227 SkASSERT(SkToBool(fHead) == SkToBool(fTail));
228 return !SkToBool(fHead);
229 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400230 const Interval* peekHead() const { return fHead; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400231 Interval* peekHead() { return fHead; }
Robert Phillips5af44de2017-07-18 14:49:38 -0400232 Interval* popHead();
233 void insertByIncreasingStart(Interval*);
234 void insertByIncreasingEnd(Interval*);
235
236 private:
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400237 SkDEBUGCODE(void validate() const;)
238
Robert Phillips5af44de2017-07-18 14:49:38 -0400239 Interval* fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400240 Interval* fTail = nullptr;
Robert Phillips5af44de2017-07-18 14:49:38 -0400241 };
242
Brian Salomon309b0532018-10-10 17:22:00 -0400243 // Compositing use cases can create > 80 intervals.
244 static const int kInitialArenaSize = 128 * sizeof(Interval);
Robert Phillips8186cbe2017-11-01 17:32:39 -0400245
Adlai Hollerca1137b2021-04-08 11:39:55 -0400246 GrDirectContext* fDContext;
Brian Salomon577aa0f2018-11-30 13:32:23 -0500247 FreePoolMultiMap fFreePool; // Recently created/used GrSurfaces
248 IntvlHash fIntvlHash; // All the intervals, hashed by proxyID
Robert Phillips5af44de2017-07-18 14:49:38 -0400249
Brian Salomon577aa0f2018-11-30 13:32:23 -0500250 IntervalList fIntvlList; // All the intervals sorted by increasing start
251 IntervalList fActiveIntvls; // List of live intervals during assignment
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400252 // (sorted by increasing end)
Adlai Holler7df8d222021-03-19 12:27:49 -0400253 IntervalList fFinishedIntvls; // All the completed intervals
254 // (sorted by increasing start)
255 UniqueKeyRegisterHash fUniqueKeyRegisters;
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400256 unsigned int fNumOps = 0;
Robert Phillips8186cbe2017-11-01 17:32:39 -0400257
Brian Salomon577aa0f2018-11-30 13:32:23 -0500258 SkDEBUGCODE(bool fAssigned = false;)
Robert Phillipseafd48a2017-11-16 07:52:08 -0500259
Adlai Holler4cfbe532021-03-17 10:36:39 -0400260 SkSTArenaAlloc<kInitialArenaSize> fInternalAllocator; // intervals & registers live here
Adlai Holler19fd5142021-03-08 10:19:30 -0700261 bool fFailedInstantiation = false;
Robert Phillips5af44de2017-07-18 14:49:38 -0400262};
263
264#endif // GrResourceAllocator_DEFINED