/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrThreadSafeCache_DEFINED
#define GrThreadSafeCache_DEFINED

#include "include/private/SkSpinlock.h"
#include "src/core/SkArenaAlloc.h"
#include "src/core/SkTDynamicHash.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/GrSurfaceProxyView.h"

// Ganesh creates a lot of utility textures (e.g., blurred-rrect masks) that need to be shared
// between the direct context and all the DDL recording contexts. This thread-safe cache
// allows this sharing.
//
// In operation, each thread will first check if the thread-safe cache possesses the required
// texture.
//
// If a DDL thread doesn't find a needed texture it will go off and create it on the CPU and then
// attempt to add it to the cache. If another thread added it in the interim, the losing thread
// will discard its work and use the texture the winning thread created (see the usage sketch
// following this comment block).
//
// If the thread in possession of the direct context doesn't find the needed texture it should
// add a placeholder view and then queue up the draw calls to complete it. In this way the
// gpu-thread has precedence over the recording threads.
//
// The invariants for this cache differ a bit from those of the proxy and resource caches.
// For this cache:
//
//    only this cache knows the unique key - neither the proxy nor backing resource should
//    be discoverable in any other cache by the unique key
//    if a backing resource resides in the resource cache then there should be an entry in this
//    cache
//    an entry in this cache, however, doesn't guarantee that there is a corresponding entry in
//    the resource cache - although the entry here should be able to generate that entry
//    (i.e., be a lazy proxy)
//
// Wrt interactions w/ GrContext/GrResourceCache purging, we have:
//
//    Both GrContext::abandonContext and GrContext::releaseResourcesAndAbandonContext will cause
//    all the refs held in this cache to be dropped prior to clearing out the resource cache.
//
//    For the size_t-variant of GrContext::purgeUnlockedResources, after an initial attempt
//    to purge the requested amount of resources fails, uniquely held resources in this cache
//    will be dropped in LRU to MRU order until the cache is under budget. Note that this
//    prioritizes the survival of resources in this cache over those just in the resource cache.
//
//    For the 'scratchResourcesOnly' variant of GrContext::purgeUnlockedResources, this cache
//    won't be modified in the scratch-only case unless the resource cache is over budget (in
//    which case it will purge uniquely-held resources in LRU to MRU order to get
//    back under budget). In the non-scratch-only case, all uniquely held resources in this cache
//    will be released prior to the resource cache being cleared out.
//
//    For GrContext::setResourceCacheLimit, if an initial pass through the resource cache doesn't
//    reach the budget, uniquely held resources in this cache will be released in LRU to MRU order.
//
//    For GrContext::performDeferredCleanup, any uniquely held resources that haven't been accessed
//    w/in 'msNotUsed' will be released from this cache prior to the resource cache being cleaned.
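//
// A sketch of the recording-thread pattern described above (illustrative only;
// 'createUtilityTexture' is a hypothetical helper standing in for the CPU-side generation):
//
//     GrSurfaceProxyView view = threadSafeCache->find(key);
//     if (!view) {
//         GrSurfaceProxyView newView = createUtilityTexture(recordingContext, key);
//         // If another thread added a view for 'key' in the interim, findOrAdd returns that
//         // view and 'newView' is simply discarded.
//         view = threadSafeCache->findOrAdd(key, newView);
//     }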
class GrThreadSafeCache {
public:
    GrThreadSafeCache();
    ~GrThreadSafeCache();

#if GR_TEST_UTILS
    int numEntries() const SK_EXCLUDES(fSpinLock);

    size_t approxBytesUsedForHash() const SK_EXCLUDES(fSpinLock);
#endif

    void dropAllRefs() SK_EXCLUDES(fSpinLock);

    // Drop uniquely held refs until under the resource cache's budget.
    // A null parameter means drop all uniquely held refs.
    void dropUniqueRefs(GrResourceCache* resourceCache) SK_EXCLUDES(fSpinLock);

    // Drop uniquely held refs that were last accessed before 'purgeTime'.
    void dropUniqueRefsOlderThan(GrStdSteadyClock::time_point purgeTime) SK_EXCLUDES(fSpinLock);

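    // Look up the view cached under 'key'. The 'WithData' variant also returns the unique
    // key's custom data (if any).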
    GrSurfaceProxyView find(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findWithData(
            const GrUniqueKey&) SK_EXCLUDES(fSpinLock);

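    // Add 'view' to the cache under 'key'. If an entry for 'key' already exists (e.g., another
    // thread won the race), the already-cached view is returned instead.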
    GrSurfaceProxyView add(const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> addWithData(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);

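    // Return the view already cached under 'key' if there is one; otherwise add 'view' and
    // return it.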
    GrSurfaceProxyView findOrAdd(const GrUniqueKey&,
                                 const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findOrAddWithData(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);

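    // Remove the entry associated with 'key', if it is present.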
    void remove(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);

    // To allow gpu-created resources to have priority, we pre-emptively place a lazy proxy
    // in the thread-safe cache (with findOrAdd). The Trampoline object allows that lazy proxy to
    // be instantiated with some later generated rendering result (see the sketch following
    // CreateLazyView below).
    class Trampoline : public SkRefCnt {
    public:
        sk_sp<GrTextureProxy> fProxy;
    };

    static std::tuple<GrSurfaceProxyView, sk_sp<Trampoline>> CreateLazyView(GrDirectContext*,
                                                                            GrColorType,
                                                                            SkISize dimensions,
                                                                            GrSurfaceOrigin,
                                                                            SkBackingFit);
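
    // A sketch of the gpu-thread path that uses CreateLazyView and the Trampoline (illustrative
    // only; 'fillInRenderingResult' is a hypothetical step that records the draw calls and
    // returns the resulting texture proxy):
    //
    //     auto [lazyView, trampoline] = GrThreadSafeCache::CreateLazyView(dContext, colorType,
    //                                                                     dimensions, origin, fit);
    //     GrSurfaceProxyView view = threadSafeCache->findOrAdd(key, lazyView);
    //     if (view != lazyView) {
    //         return view;  // another thread has already created (or begun creating) this texture
    //     }
    //     trampoline->fProxy = fillInRenderingResult(dContext, dimensions);
    //     return lazyView;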
private:
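    // A cache entry: a unique key paired with the view it maps to. Entries are allocated out of
    // fEntryAllocator and recycled via fFreeEntryList.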
    struct Entry {
        Entry(const GrUniqueKey& key, const GrSurfaceProxyView& view)
                : fKey(key)
                , fView(view)
                , fTag(Entry::kView) {
        }

        bool uniquelyHeld() const {
            SkASSERT(fTag != kEmpty);

            if (fTag == kView && fView.proxy()->unique()) {
                return true;
            }

            return false;
        }

        const GrUniqueKey& key() const {
            SkASSERT(fTag != kEmpty);
            return fKey;
        }

        sk_sp<SkData> refCustomData() const {
            SkASSERT(fTag != kEmpty);
            return fKey.refCustomData();
        }

        GrSurfaceProxyView view() {
            SkASSERT(fTag == kView);
            return fView;
        }

        void set(const GrUniqueKey& key, const GrSurfaceProxyView& view) {
            SkASSERT(fTag == kEmpty);
            fKey = key;
            fView = view;
            fTag = kView;
        }

        void makeEmpty() {
            SkASSERT(fTag != kEmpty);

            fKey.reset();
            fView.reset();
            fTag = kEmpty;
        }

        // The thread-safe cache gets to manipulate the llist and last-access members
        GrStdSteadyClock::time_point fLastAccess;

        SK_DECLARE_INTERNAL_LLIST_INTERFACE(Entry);

        // for SkTDynamicHash
        static const GrUniqueKey& GetKey(const Entry& e) {
            SkASSERT(e.fTag != kEmpty);
            return e.fKey;
        }
        static uint32_t Hash(const GrUniqueKey& key) { return key.hash(); }

    private:
        // Note: the unique key is stored here because it is never attached to a proxy or GrTexture
        GrUniqueKey        fKey;
        GrSurfaceProxyView fView;

        enum {
            kEmpty,
            kView,
        } fTag { kEmpty };
    };

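    // Obtain an Entry for 'key'/'view' (reusing one from fFreeEntryList when possible) and
    // return a retired Entry to the free list for reuse.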
    Entry* getEntry(const GrUniqueKey&, const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);
    void recycleEntry(Entry*) SK_REQUIRES(fSpinLock);

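    // The lock-protected implementations shared by the public find*/add*/findOrAdd* entry points.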
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalFind(
            const GrUniqueKey&) SK_REQUIRES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalAdd(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);

    mutable SkSpinlock fSpinLock;

    SkTDynamicHash<Entry, GrUniqueKey> fUniquelyKeyedEntryMap SK_GUARDED_BY(fSpinLock);
    // The head of this list is the MRU
    SkTInternalLList<Entry> fUniquelyKeyedEntryList SK_GUARDED_BY(fSpinLock);

    // TODO: empirically determine this from the skps
    static const int kInitialArenaSize = 64 * sizeof(Entry);

    char         fStorage[kInitialArenaSize];
    SkArenaAlloc fEntryAllocator{fStorage, kInitialArenaSize, kInitialArenaSize};
    Entry*       fFreeEntryList SK_GUARDED_BY(fSpinLock);
};

#endif // GrThreadSafeCache_DEFINED