blob: b44e706945521bb0bad45e784000374998bf2f80 [file] [log] [blame]
Robert Phillips26f3aeb2020-09-16 10:57:32 -04001/*
2 * Copyright 2020 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Robert Phillipsd464feb2020-10-08 11:00:02 -04008#ifndef GrThreadSafeCache_DEFINED
9#define GrThreadSafeCache_DEFINED
Robert Phillips26f3aeb2020-09-16 10:57:32 -040010
Robert Phillips42a741a2020-10-23 12:27:47 -040011#include "include/core/SkRefCnt.h"
Robert Phillips26f3aeb2020-09-16 10:57:32 -040012#include "include/private/SkSpinlock.h"
Robert Phillipsf3e2b3c2020-09-18 14:07:43 -040013#include "src/core/SkArenaAlloc.h"
Robert Phillips752f7e12020-09-18 12:28:59 -040014#include "src/core/SkTDynamicHash.h"
Robert Phillips45593682020-09-18 16:16:33 -040015#include "src/core/SkTInternalLList.h"
Robert Phillips71143952021-06-17 14:55:07 -040016#include "src/gpu/GrGpuBuffer.h"
Robert Phillips26f3aeb2020-09-16 10:57:32 -040017#include "src/gpu/GrSurfaceProxyView.h"
18
19// Ganesh creates a lot of utility textures (e.g., blurred-rrect masks) that need to be shared
20// between the direct context and all the DDL recording contexts. This thread-safe cache
21// allows this sharing.
22//
23// In operation, each thread will first check if the threaded cache possesses the required texture.
24//
25// If a DDL thread doesn't find a needed texture it will go off and create it on the cpu and then
26// attempt to add it to the cache. If another thread had added it in the interim, the losing thread
27// will discard its work and use the texture the winning thread had created.
28//
29// If the thread in possession of the direct context doesn't find the needed texture it should
30// add a placeholder view and then queue up the draw calls to complete it. In this way the
31// gpu-thread has precedence over the recording threads.
32//
33// The invariants for this cache differ a bit from those of the proxy and resource caches.
34// For this cache:
35//
36// only this cache knows the unique key - neither the proxy nor backing resource should
37// be discoverable in any other cache by the unique key
38// if a backing resource resides in the resource cache then there should be an entry in this
39// cache
40// an entry in this cache, however, doesn't guarantee that there is a corresponding entry in
41// the resource cache - although the entry here should be able to generate that entry
42// (i.e., be a lazy proxy)
Robert Phillipsc61c8952020-09-22 14:24:43 -040043//
44// With respect to interactions with GrContext/GrResourceCache purging, we have:
45//
46// Both GrContext::abandonContext and GrContext::releaseResourcesAndAbandonContext will cause
47// all the refs held in this cache to be dropped prior to clearing out the resource cache.
48//
49// For the size_t-variant of GrContext::purgeUnlockedResources, after an initial attempt
50// to purge the requested amount of resources fails, uniquely held resources in this cache
51// will be dropped in LRU to MRU order until the cache is under budget. Note that this
52// prioritizes the survival of resources in this cache over those just in the resource cache.
Robert Phillips331699c2020-09-22 15:20:01 -040053//
54// For the 'scratchResourcesOnly' variant of GrContext::purgeUnlockedResources, this cache
55// won't be modified in the scratch-only case unless the resource cache is over budget (in
56// which case it will purge uniquely-held resources in LRU to MRU order to get
57// back under budget). In the non-scratch-only case, all uniquely held resources in this cache
58// will be released prior to the resource cache being cleared out.
59//
60// For GrContext::setResourceCacheLimit, if an initial pass through the resource cache doesn't
61// reach the budget, uniquely held resources in this cache will be released in LRU to MRU order.
Robert Phillipsc2fe1642020-09-22 17:34:51 -040062//
63// For GrContext::performDeferredCleanup, any uniquely held resources that haven't been accessed
64// w/in 'msNotUsed' will be released from this cache prior to the resource cache being cleaned.
Robert Phillipsd464feb2020-10-08 11:00:02 -040065class GrThreadSafeCache {
Robert Phillips26f3aeb2020-09-16 10:57:32 -040066public:
Robert Phillipsd464feb2020-10-08 11:00:02 -040067 GrThreadSafeCache();
68 ~GrThreadSafeCache();
Robert Phillips26f3aeb2020-09-16 10:57:32 -040069
70#if GR_TEST_UTILS
71 int numEntries() const SK_EXCLUDES(fSpinLock);
Robert Phillipsc61c8952020-09-22 14:24:43 -040072
73 size_t approxBytesUsedForHash() const SK_EXCLUDES(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -040074#endif
75
Robert Phillips752f7e12020-09-18 12:28:59 -040076 void dropAllRefs() SK_EXCLUDES(fSpinLock);
Robert Phillips331699c2020-09-22 15:20:01 -040077
Robert Phillipsc2fe1642020-09-22 17:34:51 -040078 // Drop uniquely held refs until under the resource cache's budget.
79 // A null parameter means drop all uniquely held refs.
Robert Phillips331699c2020-09-22 15:20:01 -040080 void dropUniqueRefs(GrResourceCache* resourceCache) SK_EXCLUDES(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -040081
Robert Phillipsc2fe1642020-09-22 17:34:51 -040082 // Drop uniquely held refs that were last accessed before 'purgeTime'
83 void dropUniqueRefsOlderThan(GrStdSteadyClock::time_point purgeTime) SK_EXCLUDES(fSpinLock);
84
Robert Phillips83c38a82020-10-28 14:57:53 -040085 SkDEBUGCODE(bool has(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);)
86
Robert Phillips26f3aeb2020-09-16 10:57:32 -040087 GrSurfaceProxyView find(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
Robert Phillips6e17ffe2020-10-06 14:52:11 -040088 std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findWithData(
89 const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -040090
91 GrSurfaceProxyView add(const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
Robert Phillips6e17ffe2020-10-06 14:52:11 -040092 std::tuple<GrSurfaceProxyView, sk_sp<SkData>> addWithData(
93 const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -040094
Robert Phillips3380be92020-09-25 12:47:10 -040095 GrSurfaceProxyView findOrAdd(const GrUniqueKey&,
96 const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
Robert Phillips6e17ffe2020-10-06 14:52:11 -040097 std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findOrAddWithData(
98 const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
Robert Phillips3380be92020-09-25 12:47:10 -040099
Robert Phillips42a741a2020-10-23 12:27:47 -0400100 // To hold vertex data in the cache and have it transparently transition from cpu-side to
101 // gpu-side while being shared between all the threads we need a ref counted object that
102 // keeps hold of the cpu-side data but allows deferred filling in of the mirroring gpu buffer.
103 class VertexData : public SkNVRefCnt<VertexData> {
104 public:
105 ~VertexData();
106
107 const void* vertices() const { return fVertices; }
108 size_t size() const { return fNumVertices * fVertexSize; }
109
110 int numVertices() const { return fNumVertices; }
111 size_t vertexSize() const { return fVertexSize; }
112
113 // TODO: make these return const GrGpuBuffers?
114 GrGpuBuffer* gpuBuffer() { return fGpuBuffer.get(); }
115 sk_sp<GrGpuBuffer> refGpuBuffer() { return fGpuBuffer; }
116
117 void setGpuBuffer(sk_sp<GrGpuBuffer> gpuBuffer) {
118 // TODO: once we add the gpuBuffer we could free 'fVertices'. Deinstantiable
119 // DDLs could throw a monkey wrench into that plan though.
120 SkASSERT(!fGpuBuffer);
121 fGpuBuffer = gpuBuffer;
122 }
123
124 void reset() {
125 sk_free(const_cast<void*>(fVertices));
126 fVertices = nullptr;
127 fNumVertices = 0;
128 fVertexSize = 0;
129 fGpuBuffer.reset();
130 }
131
132 private:
133 friend class GrThreadSafeCache; // for access to ctor
134
135 VertexData(const void* vertices, int numVertices, size_t vertexSize)
136 : fVertices(vertices)
137 , fNumVertices(numVertices)
138 , fVertexSize(vertexSize) {
139 }
140
Robert Phillips83c38a82020-10-28 14:57:53 -0400141 VertexData(sk_sp<GrGpuBuffer> gpuBuffer, int numVertices, size_t vertexSize)
142 : fVertices(nullptr)
143 , fNumVertices(numVertices)
144 , fVertexSize(vertexSize)
145 , fGpuBuffer(std::move(gpuBuffer)) {
146 }
147
Robert Phillips42a741a2020-10-23 12:27:47 -0400148 const void* fVertices;
149 int fNumVertices;
150 size_t fVertexSize;
151
152 sk_sp<GrGpuBuffer> fGpuBuffer;
153 };
154
155 // The returned VertexData object takes ownership of 'vertices' which had better have been
156 // allocated with malloc!
157 static sk_sp<VertexData> MakeVertexData(const void* vertices,
158 int vertexCount,
159 size_t vertexSize);
Robert Phillips83c38a82020-10-28 14:57:53 -0400160 static sk_sp<VertexData> MakeVertexData(sk_sp<GrGpuBuffer> buffer,
161 int vertexCount,
162 size_t vertexSize);
Robert Phillips42a741a2020-10-23 12:27:47 -0400163
164 std::tuple<sk_sp<VertexData>, sk_sp<SkData>> findVertsWithData(
Robert Phillips83c38a82020-10-28 14:57:53 -0400165 const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
Robert Phillips42a741a2020-10-23 12:27:47 -0400166
Robert Phillips67e58cb2020-11-02 08:57:39 -0500167 typedef bool (*IsNewerBetter)(SkData* incumbent, SkData* challenger);
168
Robert Phillips42a741a2020-10-23 12:27:47 -0400169 std::tuple<sk_sp<VertexData>, sk_sp<SkData>> addVertsWithData(
Robert Phillips83c38a82020-10-28 14:57:53 -0400170 const GrUniqueKey&,
Robert Phillips67e58cb2020-11-02 08:57:39 -0500171 sk_sp<VertexData>,
172 IsNewerBetter) SK_EXCLUDES(fSpinLock);
Robert Phillips42a741a2020-10-23 12:27:47 -0400173
Robert Phillips3380be92020-09-25 12:47:10 -0400174 void remove(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
175
Robert Phillipsb1807122020-10-06 16:44:18 -0400176 // To allow gpu-created resources to have priority, we pre-emptively place a lazy proxy
177 // in the thread-safe cache (with findOrAdd). The Trampoline object allows that lazy proxy to
178 // be instantiated with some later generated rendering result.
179 class Trampoline : public SkRefCnt {
180 public:
181 sk_sp<GrTextureProxy> fProxy;
182 };
183
184 static std::tuple<GrSurfaceProxyView, sk_sp<Trampoline>> CreateLazyView(GrDirectContext*,
Robert Phillipsb1807122020-10-06 16:44:18 -0400185 GrColorType,
Robert Phillipsfde67e42020-10-07 15:33:43 -0400186 SkISize dimensions,
187 GrSurfaceOrigin,
188 SkBackingFit);
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400189private:
190 struct Entry {
Robert Phillips01771c12020-10-20 15:46:07 -0400191 Entry(const GrUniqueKey& key, const GrSurfaceProxyView& view)
192 : fKey(key)
193 , fView(view)
194 , fTag(Entry::kView) {
195 }
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400196
Robert Phillips42a741a2020-10-23 12:27:47 -0400197 Entry(const GrUniqueKey& key, sk_sp<VertexData> vertData)
198 : fKey(key)
199 , fVertData(std::move(vertData))
200 , fTag(Entry::kVertData) {
201 }
202
203 ~Entry() {
204 this->makeEmpty();
205 }
206
Robert Phillips01771c12020-10-20 15:46:07 -0400207 bool uniquelyHeld() const {
208 SkASSERT(fTag != kEmpty);
209
210 if (fTag == kView && fView.proxy()->unique()) {
211 return true;
Robert Phillips42a741a2020-10-23 12:27:47 -0400212 } else if (fTag == kVertData && fVertData->unique()) {
213 return true;
Robert Phillips01771c12020-10-20 15:46:07 -0400214 }
215
216 return false;
217 }
218
219 const GrUniqueKey& key() const {
220 SkASSERT(fTag != kEmpty);
221 return fKey;
222 }
223
Robert Phillips83c38a82020-10-28 14:57:53 -0400224 SkData* getCustomData() const {
225 SkASSERT(fTag != kEmpty);
226 return fKey.getCustomData();
227 }
228
Robert Phillips01771c12020-10-20 15:46:07 -0400229 sk_sp<SkData> refCustomData() const {
230 SkASSERT(fTag != kEmpty);
231 return fKey.refCustomData();
232 }
233
234 GrSurfaceProxyView view() {
235 SkASSERT(fTag == kView);
236 return fView;
237 }
238
Robert Phillips42a741a2020-10-23 12:27:47 -0400239 sk_sp<VertexData> vertexData() {
240 SkASSERT(fTag == kVertData);
241 return fVertData;
242 }
243
Robert Phillips01771c12020-10-20 15:46:07 -0400244 void set(const GrUniqueKey& key, const GrSurfaceProxyView& view) {
245 SkASSERT(fTag == kEmpty);
246 fKey = key;
247 fView = view;
248 fTag = kView;
249 }
250
251 void makeEmpty() {
Robert Phillips01771c12020-10-20 15:46:07 -0400252 fKey.reset();
Robert Phillips42a741a2020-10-23 12:27:47 -0400253 if (fTag == kView) {
254 fView.reset();
255 } else if (fTag == kVertData) {
256 fVertData.reset();
257 }
Robert Phillips01771c12020-10-20 15:46:07 -0400258 fTag = kEmpty;
259 }
260
Robert Phillips42a741a2020-10-23 12:27:47 -0400261 void set(const GrUniqueKey& key, sk_sp<VertexData> vertData) {
Robert Phillips83c38a82020-10-28 14:57:53 -0400262 SkASSERT(fTag == kEmpty || fTag == kVertData);
Robert Phillips42a741a2020-10-23 12:27:47 -0400263 fKey = key;
264 fVertData = vertData;
265 fTag = kVertData;
266 }
Robert Phillips45593682020-09-18 16:16:33 -0400267
Robert Phillips42a741a2020-10-23 12:27:47 -0400268 // The thread-safe cache gets to directly manipulate the llist and last-access members
269 GrStdSteadyClock::time_point fLastAccess;
Robert Phillips45593682020-09-18 16:16:33 -0400270 SK_DECLARE_INTERNAL_LLIST_INTERFACE(Entry);
Robert Phillips752f7e12020-09-18 12:28:59 -0400271
272 // for SkTDynamicHash
Robert Phillips01771c12020-10-20 15:46:07 -0400273 static const GrUniqueKey& GetKey(const Entry& e) {
274 SkASSERT(e.fTag != kEmpty);
275 return e.fKey;
276 }
Robert Phillips752f7e12020-09-18 12:28:59 -0400277 static uint32_t Hash(const GrUniqueKey& key) { return key.hash(); }
Robert Phillips01771c12020-10-20 15:46:07 -0400278
279 private:
280 // Note: the unique key is stored here bc it is never attached to a proxy or a GrTexture
Robert Phillips42a741a2020-10-23 12:27:47 -0400281 GrUniqueKey fKey;
282 union {
283 GrSurfaceProxyView fView;
284 sk_sp<VertexData> fVertData;
285 };
Robert Phillips01771c12020-10-20 15:46:07 -0400286
287 enum {
288 kEmpty,
289 kView,
Robert Phillips42a741a2020-10-23 12:27:47 -0400290 kVertData,
Robert Phillips01771c12020-10-20 15:46:07 -0400291 } fTag { kEmpty };
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400292 };
293
Robert Phillips42a741a2020-10-23 12:27:47 -0400294 void makeExistingEntryMRU(Entry*) SK_REQUIRES(fSpinLock);
295 Entry* makeNewEntryMRU(Entry*) SK_REQUIRES(fSpinLock);
296
297 Entry* getEntry(const GrUniqueKey&, const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);
298 Entry* getEntry(const GrUniqueKey&, sk_sp<VertexData>) SK_REQUIRES(fSpinLock);
299
Robert Phillips45593682020-09-18 16:16:33 -0400300 void recycleEntry(Entry*) SK_REQUIRES(fSpinLock);
Robert Phillipsf3e2b3c2020-09-18 14:07:43 -0400301
Robert Phillips6e17ffe2020-10-06 14:52:11 -0400302 std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalFind(
Robert Phillips83c38a82020-10-28 14:57:53 -0400303 const GrUniqueKey&) SK_REQUIRES(fSpinLock);
Robert Phillips6e17ffe2020-10-06 14:52:11 -0400304 std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalAdd(
Robert Phillips83c38a82020-10-28 14:57:53 -0400305 const GrUniqueKey&,
306 const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400307
Robert Phillips42a741a2020-10-23 12:27:47 -0400308 std::tuple<sk_sp<VertexData>, sk_sp<SkData>> internalFindVerts(
Robert Phillips83c38a82020-10-28 14:57:53 -0400309 const GrUniqueKey&) SK_REQUIRES(fSpinLock);
Robert Phillips42a741a2020-10-23 12:27:47 -0400310 std::tuple<sk_sp<VertexData>, sk_sp<SkData>> internalAddVerts(
Robert Phillips83c38a82020-10-28 14:57:53 -0400311 const GrUniqueKey&,
Robert Phillips67e58cb2020-11-02 08:57:39 -0500312 sk_sp<VertexData>,
313 IsNewerBetter) SK_REQUIRES(fSpinLock);
Robert Phillips42a741a2020-10-23 12:27:47 -0400314
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400315 mutable SkSpinlock fSpinLock;
316
Robert Phillips74ad27d2020-10-20 10:16:55 -0400317 SkTDynamicHash<Entry, GrUniqueKey> fUniquelyKeyedEntryMap SK_GUARDED_BY(fSpinLock);
Robert Phillips45593682020-09-18 16:16:33 -0400318 // The head of this list is the MRU
Robert Phillips74ad27d2020-10-20 10:16:55 -0400319 SkTInternalLList<Entry> fUniquelyKeyedEntryList SK_GUARDED_BY(fSpinLock);
Robert Phillipsf3e2b3c2020-09-18 14:07:43 -0400320
321 // TODO: empirically determine this from the skps
322 static const int kInitialArenaSize = 64 * sizeof(Entry);
323
324 char fStorage[kInitialArenaSize];
325 SkArenaAlloc fEntryAllocator{fStorage, kInitialArenaSize, kInitialArenaSize};
Robert Phillips45593682020-09-18 16:16:33 -0400326 Entry* fFreeEntryList SK_GUARDED_BY(fSpinLock);
Robert Phillips26f3aeb2020-09-16 10:57:32 -0400327};
328
Robert Phillipsd464feb2020-10-08 11:00:02 -0400329#endif // GrThreadSafeCache_DEFINED