blob: c69dc2eca285511bffd720503afedbab2a1b36b2 [file] [log] [blame]
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#ifndef GrResourceAllocator_DEFINED
9#define GrResourceAllocator_DEFINED
10
Mike Kleinc0bd9f92019-04-23 12:05:21 -050011#include "src/gpu/GrGpuResourcePriv.h"
Greg Daniel456f9b52020-03-05 19:14:18 +000012#include "src/gpu/GrSurface.h"
Greg Danielf91aeb22019-06-18 09:58:02 -040013#include "src/gpu/GrSurfaceProxy.h"
Robert Phillips8186cbe2017-11-01 17:32:39 -040014
Ben Wagner729a23f2019-05-17 16:29:34 -040015#include "src/core/SkArenaAlloc.h"
Mike Kleinc0bd9f92019-04-23 12:05:21 -050016#include "src/core/SkTDynamicHash.h"
17#include "src/core/SkTMultiMap.h"
Robert Phillips5af44de2017-07-18 14:49:38 -040018
19class GrResourceProvider;
20
Robert Phillips715d08c2018-07-18 13:56:48 -040021// Print out explicit allocation information
22#define GR_ALLOCATION_SPEW 0
23
Robert Phillipsda1be462018-07-27 07:18:06 -040024// Print out information about interval creation
25#define GR_TRACK_INTERVAL_CREATION 0
26
/*
 * The ResourceAllocator explicitly distributes GPU resources at flush time. It operates by
 * being given the usage intervals of the various proxies. It keeps these intervals in a singly
 * linked list sorted by increasing start index. (It also maintains a hash table from proxyID
 * to interval to find proxy reuse). When it comes time to allocate the resources it
 * traverses the sorted list and:
 *     removes intervals from the active list that have completed (returning their GrSurfaces
 *     to the free pool)
 *
 *     allocates a new resource (preferably from the free pool) for the new interval
 *     adds the new interval to the active list (that is sorted by increasing end index)
 *
 * Note: the op indices (used in the usage intervals) come from the order of the ops in
 * their opsTasks after the opsTask DAG has been linearized.
 *
 *************************************************************************************************
 * How does instantiation failure handling work when explicitly allocating?
 *
 * In the gather usage intervals pass all the GrSurfaceProxies used in the flush should be
 * gathered (i.e., in GrOpsTask::gatherProxyIntervals).
 *
 * The allocator will churn through this list but could fail anywhere.
 *
 * Allocation failure handling occurs at two levels:
 *
 * 1) If the GrSurface backing an opsTask fails to allocate then the entire opsTask is dropped.
 *
 * 2) If an individual GrSurfaceProxy fails to allocate then any ops that use it are dropped
 *    (via GrOpsTask::purgeOpsWithUninstantiatedProxies)
 *
 * The pass to determine which ops to drop is a bit laborious so we only check the opsTasks and
 * individual ops when something goes wrong in allocation (i.e., when the return code from
 * GrResourceAllocator::assign is bad)
 *
 * All together this means we should never attempt to draw an op which is missing some
 * required GrSurface.
 *
 * One wrinkle in this plan is that promise images are fulfilled during the gather interval pass.
 * If any of the promise images fail at this stage then the allocator is set into an error
 * state and all allocations are then scanned for failures during the main allocation pass.
 */
68class GrResourceAllocator {
69public:
Brian Salomonbeb7f522019-08-30 16:19:42 -040070 GrResourceAllocator(GrResourceProvider* resourceProvider SkDEBUGCODE(, int numOpsTasks))
Adlai Hollerc616e1c2021-02-11 15:18:17 -050071 : fResourceProvider(resourceProvider) {}
Robert Phillips5af44de2017-07-18 14:49:38 -040072
Robert Phillips5b65a842017-11-13 15:48:12 -050073 ~GrResourceAllocator();
74
Robert Phillips5af44de2017-07-18 14:49:38 -040075 unsigned int curOp() const { return fNumOps; }
76 void incOps() { fNumOps++; }
Robert Phillips5af44de2017-07-18 14:49:38 -040077
Adlai Holler7f7a5df2021-02-09 17:41:10 +000078 /** Indicates whether a given call to addInterval represents an actual usage of the
79 * provided proxy. This is mainly here to accommodate deferred proxies attached to opsTasks.
80 * In that case we need to create an extra long interval for them (due to the upload) but
81 * don't want to count that usage/reference towards the proxy's recyclability.
82 */
83 enum class ActualUse : bool {
84 kNo = false,
85 kYes = true
86 };
87
Robert Phillipseafd48a2017-11-16 07:52:08 -050088 // Add a usage interval from 'start' to 'end' inclusive. This is usually used for renderTargets.
Robert Phillips5af44de2017-07-18 14:49:38 -040089 // If an existing interval already exists it will be expanded to include the new range.
Adlai Holler7f7a5df2021-02-09 17:41:10 +000090 void addInterval(GrSurfaceProxy*, unsigned int start, unsigned int end, ActualUse actualUse
Chris Dalton8816b932017-11-29 16:48:25 -070091 SkDEBUGCODE(, bool isDirectDstRead = false));
Robert Phillips5af44de2017-07-18 14:49:38 -040092
Adlai Holler19fd5142021-03-08 10:19:30 -070093 // Assign resources to all proxies. Returns whether the assignment was successful.
94 bool assign();
Robert Phillips5af44de2017-07-18 14:49:38 -040095
Robert Phillips715d08c2018-07-18 13:56:48 -040096#if GR_ALLOCATION_SPEW
97 void dumpIntervals();
98#endif
99
Robert Phillips5af44de2017-07-18 14:49:38 -0400100private:
101 class Interval;
102
103 // Remove dead intervals from the active list
104 void expire(unsigned int curIndex);
105
106 // These two methods wrap the interactions with the free pool
Robert Phillips715d08c2018-07-18 13:56:48 -0400107 void recycleSurface(sk_sp<GrSurface> surface);
Chris Dalton0b68dda2019-11-07 21:08:03 -0700108 sk_sp<GrSurface> findSurfaceFor(const GrSurfaceProxy* proxy);
Robert Phillips5af44de2017-07-18 14:49:38 -0400109
Adlai Hollerc616e1c2021-02-11 15:18:17 -0500110 void determineRecyclability();
111
Robert Phillips57aa3672017-07-21 11:38:13 -0400112 struct FreePoolTraits {
113 static const GrScratchKey& GetKey(const GrSurface& s) {
114 return s.resourcePriv().getScratchKey();
115 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400116
Robert Phillips57aa3672017-07-21 11:38:13 -0400117 static uint32_t Hash(const GrScratchKey& key) { return key.hash(); }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500118 static void OnFree(GrSurface* s) { s->unref(); }
Robert Phillips5af44de2017-07-18 14:49:38 -0400119 };
Robert Phillips57aa3672017-07-21 11:38:13 -0400120 typedef SkTMultiMap<GrSurface, GrScratchKey, FreePoolTraits> FreePoolMultiMap;
121
Robert Phillips5af44de2017-07-18 14:49:38 -0400122 typedef SkTDynamicHash<Interval, unsigned int> IntvlHash;
123
124 class Interval {
125 public:
126 Interval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end)
127 : fProxy(proxy)
128 , fProxyID(proxy->uniqueID().asUInt())
129 , fStart(start)
130 , fEnd(end)
131 , fNext(nullptr) {
132 SkASSERT(proxy);
Robert Phillipsda1be462018-07-27 07:18:06 -0400133#if GR_TRACK_INTERVAL_CREATION
134 fUniqueID = CreateUniqueID();
Robert Phillips047d5bb2021-01-08 13:39:19 -0500135 SkString proxyStr = proxy->dump();
136 SkDebugf("New intvl %d: %s [%d, %d]\n", fUniqueID, proxyStr.c_str(), start, end);
Robert Phillipsda1be462018-07-27 07:18:06 -0400137#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400138 }
139
Robert Phillips39667382019-04-17 16:03:30 -0400140 // Used when recycling an interval
Robert Phillips8186cbe2017-11-01 17:32:39 -0400141 void resetTo(GrSurfaceProxy* proxy, unsigned int start, unsigned int end) {
142 SkASSERT(proxy);
Robert Phillips39667382019-04-17 16:03:30 -0400143 SkASSERT(!fProxy && !fNext);
Robert Phillips8186cbe2017-11-01 17:32:39 -0400144
Robert Phillipsc73666f2019-04-24 08:49:48 -0400145 fUses = 0;
Robert Phillips8186cbe2017-11-01 17:32:39 -0400146 fProxy = proxy;
147 fProxyID = proxy->uniqueID().asUInt();
148 fStart = start;
149 fEnd = end;
150 fNext = nullptr;
Robert Phillipsda1be462018-07-27 07:18:06 -0400151#if GR_TRACK_INTERVAL_CREATION
152 fUniqueID = CreateUniqueID();
Robert Phillips047d5bb2021-01-08 13:39:19 -0500153 SkString proxyStr = proxy->dump();
154 SkDebugf("New intvl %d: %s [ %d, %d ]\n", fUniqueID, proxyStr.c_str(), start, end);
Robert Phillipsda1be462018-07-27 07:18:06 -0400155#endif
Robert Phillips8186cbe2017-11-01 17:32:39 -0400156 }
157
Robert Phillips5b65a842017-11-13 15:48:12 -0500158 ~Interval() {
159 SkASSERT(!fAssignedSurface);
160 }
161
Robert Phillipsf8e25022017-11-08 15:24:31 -0500162 const GrSurfaceProxy* proxy() const { return fProxy; }
163 GrSurfaceProxy* proxy() { return fProxy; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400164
Robert Phillipsf8e25022017-11-08 15:24:31 -0500165 unsigned int start() const { return fStart; }
166 unsigned int end() const { return fEnd; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400167
168 void setNext(Interval* next) { fNext = next; }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500169 const Interval* next() const { return fNext; }
170 Interval* next() { return fNext; }
171
Robert Phillipsc73666f2019-04-24 08:49:48 -0400172 void markAsRecyclable() { fIsRecyclable = true;}
173 bool isRecyclable() const { return fIsRecyclable; }
174
175 void addUse() { fUses++; }
176 int uses() { return fUses; }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500177
178 void extendEnd(unsigned int newEnd) {
Chris Dalton8816b932017-11-29 16:48:25 -0700179 if (newEnd > fEnd) {
180 fEnd = newEnd;
Robert Phillipsda1be462018-07-27 07:18:06 -0400181#if GR_TRACK_INTERVAL_CREATION
182 SkDebugf("intvl %d: extending from %d to %d\n", fUniqueID, fEnd, newEnd);
183#endif
Chris Dalton8816b932017-11-29 16:48:25 -0700184 }
Robert Phillipsf8e25022017-11-08 15:24:31 -0500185 }
186
Robert Phillips5b65a842017-11-13 15:48:12 -0500187 void assign(sk_sp<GrSurface>);
Ben Wagner5d1adbf2018-05-28 13:35:39 -0400188 bool wasAssignedSurface() const { return fAssignedSurface != nullptr; }
Robert Phillips5b65a842017-11-13 15:48:12 -0500189 sk_sp<GrSurface> detachSurface() { return std::move(fAssignedSurface); }
190
Robert Phillips5af44de2017-07-18 14:49:38 -0400191 // for SkTDynamicHash
192 static const uint32_t& GetKey(const Interval& intvl) {
193 return intvl.fProxyID;
194 }
195 static uint32_t Hash(const uint32_t& key) { return key; }
196
Robert Phillipsf8e25022017-11-08 15:24:31 -0500197 private:
Adlai Hollerc616e1c2021-02-11 15:18:17 -0500198 // TODO: Do we really need this variable?
Robert Phillips5b65a842017-11-13 15:48:12 -0500199 sk_sp<GrSurface> fAssignedSurface;
200 GrSurfaceProxy* fProxy;
201 uint32_t fProxyID; // This is here b.c. DynamicHash requires a ref to the key
202 unsigned int fStart;
203 unsigned int fEnd;
204 Interval* fNext;
Robert Phillipsc73666f2019-04-24 08:49:48 -0400205 unsigned int fUses = 0;
206 bool fIsRecyclable = false;
Robert Phillipsda1be462018-07-27 07:18:06 -0400207
208#if GR_TRACK_INTERVAL_CREATION
209 uint32_t fUniqueID;
210
211 uint32_t CreateUniqueID();
212#endif
Robert Phillips5af44de2017-07-18 14:49:38 -0400213 };
214
215 class IntervalList {
216 public:
217 IntervalList() = default;
218 ~IntervalList() {
Robert Phillips8186cbe2017-11-01 17:32:39 -0400219 // The only time we delete an IntervalList is in the GrResourceAllocator dtor.
220 // Since the arena allocator will clean up for us we don't bother here.
Robert Phillips5af44de2017-07-18 14:49:38 -0400221 }
222
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400223 bool empty() const {
224 SkASSERT(SkToBool(fHead) == SkToBool(fTail));
225 return !SkToBool(fHead);
226 }
Robert Phillips5af44de2017-07-18 14:49:38 -0400227 const Interval* peekHead() const { return fHead; }
Robert Phillipsc73666f2019-04-24 08:49:48 -0400228 Interval* peekHead() { return fHead; }
Robert Phillips5af44de2017-07-18 14:49:38 -0400229 Interval* popHead();
230 void insertByIncreasingStart(Interval*);
231 void insertByIncreasingEnd(Interval*);
Robert Phillips4150eea2018-02-07 17:08:21 -0500232 Interval* detachAll();
Robert Phillips5af44de2017-07-18 14:49:38 -0400233
234 private:
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400235 SkDEBUGCODE(void validate() const;)
236
Robert Phillips5af44de2017-07-18 14:49:38 -0400237 Interval* fHead = nullptr;
Robert Phillipsdf25e3a2018-08-08 12:48:40 -0400238 Interval* fTail = nullptr;
Robert Phillips5af44de2017-07-18 14:49:38 -0400239 };
240
Brian Salomon309b0532018-10-10 17:22:00 -0400241 // Compositing use cases can create > 80 intervals.
242 static const int kInitialArenaSize = 128 * sizeof(Interval);
Robert Phillips8186cbe2017-11-01 17:32:39 -0400243
Brian Salomon577aa0f2018-11-30 13:32:23 -0500244 GrResourceProvider* fResourceProvider;
Brian Salomon577aa0f2018-11-30 13:32:23 -0500245 FreePoolMultiMap fFreePool; // Recently created/used GrSurfaces
246 IntvlHash fIntvlHash; // All the intervals, hashed by proxyID
Robert Phillips5af44de2017-07-18 14:49:38 -0400247
Brian Salomon577aa0f2018-11-30 13:32:23 -0500248 IntervalList fIntvlList; // All the intervals sorted by increasing start
249 IntervalList fActiveIntvls; // List of live intervals during assignment
Robert Phillipsc476e5d2019-03-26 14:50:08 -0400250 // (sorted by increasing end)
251 unsigned int fNumOps = 0;
Robert Phillips8186cbe2017-11-01 17:32:39 -0400252
Brian Salomon577aa0f2018-11-30 13:32:23 -0500253 SkDEBUGCODE(bool fAssigned = false;)
Robert Phillipseafd48a2017-11-16 07:52:08 -0500254
Adlai Holler19fd5142021-03-08 10:19:30 -0700255 SkSTArenaAlloc<kInitialArenaSize> fIntervalAllocator;
256 Interval* fFreeIntervalList = nullptr;
257 bool fFailedInstantiation = false;
Robert Phillips5af44de2017-07-18 14:49:38 -0400258};
259
260#endif // GrResourceAllocator_DEFINED