/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrThreadSafeCache_DEFINED
#define GrThreadSafeCache_DEFINED

#include "include/core/SkRefCnt.h"
#include "include/private/SkSpinlock.h"
#include "src/core/SkArenaAlloc.h"
#include "src/core/SkTDynamicHash.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/GrSurfaceProxyView.h"

class GrGpuBuffer;

// Ganesh creates a lot of utility textures (e.g., blurred-rrect masks) that need to be shared
// between the direct context and all the DDL recording contexts. This thread-safe cache
// enables that sharing.
//
// In operation, each thread will first check if the thread-safe cache possesses the required
// texture.
//
// If a DDL thread doesn't find a needed texture it will go off and create it on the CPU and then
// attempt to add it to the cache. If another thread has added it in the interim, the losing
// thread will discard its work and use the texture the winning thread created (a sketch of this
// flow follows below).
//
// If the thread in possession of the direct context doesn't find the needed texture it should
// add a placeholder view and then queue up the draw calls to complete it. In this way the
// gpu-thread has precedence over the recording threads.
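//
// As a rough illustration of that recording-thread flow (the 'threadSafeCache' pointer and the
// 'createMaskOnCpu' helper below are hypothetical, not part of this API):
//
//     GrSurfaceProxyView view = threadSafeCache->find(key);
//     if (!view) {
//         view = createMaskOnCpu(...);             // work that may turn out to be redundant
//         view = threadSafeCache->add(key, view);  // 'add' returns the winning thread's view,
//     }                                            // which may not be the one passed in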
//
// The invariants for this cache differ a bit from those of the proxy and resource caches.
// For this cache:
//
//   - only this cache knows the unique key - neither the proxy nor backing resource should
//     be discoverable in any other cache by the unique key
//   - if a backing resource resides in the resource cache then there should be an entry in
//     this cache
//   - an entry in this cache, however, doesn't guarantee that there is a corresponding entry
//     in the resource cache - although the entry here should be able to generate that entry
//     (i.e., be a lazy proxy)
//
// With respect to interactions with GrContext/GrResourceCache purging, we have:
//
// Both GrContext::abandonContext and GrContext::releaseResourcesAndAbandonContext will cause
// all the refs held in this cache to be dropped prior to clearing out the resource cache.
//
// For the size_t variant of GrContext::purgeUnlockedResources, after an initial attempt
// to purge the requested amount of resources fails, uniquely held resources in this cache
// will be dropped in LRU to MRU order until the cache is under budget. Note that this
// prioritizes the survival of resources in this cache over those just in the resource cache.
//
// For the 'scratchResourcesOnly' variant of GrContext::purgeUnlockedResources, this cache
// won't be modified in the scratch-only case unless the resource cache is over budget (in
// which case it will purge uniquely held resources in LRU to MRU order to get
// back under budget). In the non-scratch-only case, all uniquely held resources in this cache
// will be released prior to the resource cache being cleared out.
//
// For GrContext::setResourceCacheLimit, if an initial pass through the resource cache doesn't
// reach the budget, uniquely held resources in this cache will be released in LRU to MRU order.
//
// For GrContext::performDeferredCleanup, any uniquely held resources that haven't been accessed
// within 'msNotUsed' will be released from this cache prior to the resource cache being cleaned.
class GrThreadSafeCache {
public:
    GrThreadSafeCache();
    ~GrThreadSafeCache();

#if GR_TEST_UTILS
    int numEntries() const SK_EXCLUDES(fSpinLock);

    size_t approxBytesUsedForHash() const SK_EXCLUDES(fSpinLock);
#endif

    void dropAllRefs() SK_EXCLUDES(fSpinLock);

    // Drop uniquely held refs until under the resource cache's budget.
    // A null parameter means drop all uniquely held refs.
    void dropUniqueRefs(GrResourceCache* resourceCache) SK_EXCLUDES(fSpinLock);

    // Drop uniquely held refs that were last accessed before 'purgeTime'.
    void dropUniqueRefsOlderThan(GrStdSteadyClock::time_point purgeTime) SK_EXCLUDES(fSpinLock);
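
    // For illustration, a caller like GrContext::performDeferredCleanup (see the class comment
    // above) might derive 'purgeTime' from its 'msNotUsed' duration roughly as follows:
    //
    //     auto purgeTime = GrStdSteadyClock::now() - msNotUsed;
    //     threadSafeCache->dropUniqueRefsOlderThan(purgeTime);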

    GrSurfaceProxyView find(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findWithData(
            const GrUniqueKey&) SK_EXCLUDES(fSpinLock);

    GrSurfaceProxyView add(const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> addWithData(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);

    GrSurfaceProxyView findOrAdd(const GrUniqueKey&,
                                 const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> findOrAddWithData(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_EXCLUDES(fSpinLock);
    // To hold vertex data in the cache and have it transparently transition from cpu-side to
    // gpu-side while being shared between all the threads, we need a ref-counted object that
    // keeps hold of the cpu-side data but allows deferred filling in of the mirroring gpu buffer
    // (a sketch of that transition follows the class).
    class VertexData : public SkNVRefCnt<VertexData> {
    public:
        ~VertexData();

        const void* vertices() const { return fVertices; }
        size_t size() const { return fNumVertices * fVertexSize; }

        int numVertices() const { return fNumVertices; }
        size_t vertexSize() const { return fVertexSize; }

        // TODO: make these return const GrGpuBuffers?
        GrGpuBuffer* gpuBuffer() { return fGpuBuffer.get(); }
        sk_sp<GrGpuBuffer> refGpuBuffer() { return fGpuBuffer; }

        void setGpuBuffer(sk_sp<GrGpuBuffer> gpuBuffer) {
            // TODO: once we add the gpuBuffer we could free 'fVertices'. Deinstantiable
            // DDLs could throw a monkey wrench into that plan though.
            SkASSERT(!fGpuBuffer);
            fGpuBuffer = std::move(gpuBuffer);
        }

        void reset() {
            sk_free(const_cast<void*>(fVertices));
            fVertices = nullptr;
            fNumVertices = 0;
            fVertexSize = 0;
            fGpuBuffer.reset();
        }

    private:
        friend class GrThreadSafeCache;  // for access to ctor

        VertexData(const void* vertices, int numVertices, size_t vertexSize)
                : fVertices(vertices)
                , fNumVertices(numVertices)
                , fVertexSize(vertexSize) {
        }

        const void* fVertices;
        int fNumVertices;
        size_t fVertexSize;

        sk_sp<GrGpuBuffer> fGpuBuffer;
    };
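
    // For illustration, the deferred cpu-to-gpu transition mentioned above might look roughly
    // like this at flush time (the 'resourceProvider' and the exact buffer-creation call are
    // assumptions, not part of this class):
    //
    //     sk_sp<GrGpuBuffer> buffer = resourceProvider->createBuffer(
    //             vertData->size(), GrGpuBufferType::kVertex, kStatic_GrAccessPattern,
    //             vertData->vertices());
    //     vertData->setGpuBuffer(std::move(buffer));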

    // The returned VertexData object takes ownership of 'vertices' which had better have been
    // allocated with malloc!
    static sk_sp<VertexData> MakeVertexData(const void* vertices,
                                            int vertexCount,
                                            size_t vertexSize);
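
    // A minimal usage sketch ('kNumVerts', 'fillInVerts', and the SkPoint layout are purely
    // illustrative):
    //
    //     SkPoint* verts = static_cast<SkPoint*>(sk_malloc_throw(kNumVerts * sizeof(SkPoint)));
    //     fillInVerts(verts, kNumVerts);
    //     auto vertData = GrThreadSafeCache::MakeVertexData(verts, kNumVerts, sizeof(SkPoint));
    //     auto [cachedVerts, data] = threadSafeCache->addVertsWithData(key, std::move(vertData));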

    std::tuple<sk_sp<VertexData>, sk_sp<SkData>> findVertsWithData(
            const GrUniqueKey&) SK_EXCLUDES(fSpinLock);

    std::tuple<sk_sp<VertexData>, sk_sp<SkData>> addVertsWithData(
            const GrUniqueKey&, sk_sp<VertexData>) SK_EXCLUDES(fSpinLock);

    void remove(const GrUniqueKey&) SK_EXCLUDES(fSpinLock);

    // To allow gpu-created resources to have priority, we pre-emptively place a lazy proxy
    // in the thread-safe cache (with findOrAdd). The Trampoline object allows that lazy proxy
    // to be instantiated with some later-generated rendering result (see the sketch following
    // CreateLazyView, below).
    class Trampoline : public SkRefCnt {
    public:
        sk_sp<GrTextureProxy> fProxy;
    };

    static std::tuple<GrSurfaceProxyView, sk_sp<Trampoline>> CreateLazyView(GrDirectContext*,
                                                                            GrColorType,
                                                                            SkISize dimensions,
                                                                            GrSurfaceOrigin,
                                                                            SkBackingFit);
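
    // A rough sketch of that gpu-thread pattern (the 'renderToProxy' step is hypothetical, and
    // the view comparison assumes GrSurfaceProxyView's equality operators):
    //
    //     auto [lazyView, trampoline] = GrThreadSafeCache::CreateLazyView(dContext, colorType,
    //                                                                     dims, origin, fit);
    //     GrSurfaceProxyView view = threadSafeCache->findOrAdd(key, lazyView);
    //     if (view != lazyView) {
    //         return view;  // some other thread already created and cached this result
    //     }
    //     // ... queue up the draws that produce the result, then fulfill the lazy proxy:
    //     trampoline->fProxy = renderToProxy(dContext, dims);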
private:
    struct Entry {
        Entry(const GrUniqueKey& key, const GrSurfaceProxyView& view)
                : fKey(key)
                , fView(view)
                , fTag(Entry::kView) {
        }

        Entry(const GrUniqueKey& key, sk_sp<VertexData> vertData)
                : fKey(key)
                , fVertData(std::move(vertData))
                , fTag(Entry::kVertData) {
        }

        ~Entry() {
            this->makeEmpty();
        }

        bool uniquelyHeld() const {
            SkASSERT(fTag != kEmpty);

            if (fTag == kView && fView.proxy()->unique()) {
                return true;
            } else if (fTag == kVertData && fVertData->unique()) {
                return true;
            }

            return false;
        }

        const GrUniqueKey& key() const {
            SkASSERT(fTag != kEmpty);
            return fKey;
        }

        sk_sp<SkData> refCustomData() const {
            SkASSERT(fTag != kEmpty);
            return fKey.refCustomData();
        }

        GrSurfaceProxyView view() {
            SkASSERT(fTag == kView);
            return fView;
        }

        sk_sp<VertexData> vertexData() {
            SkASSERT(fTag == kVertData);
            return fVertData;
        }

        void set(const GrUniqueKey& key, const GrSurfaceProxyView& view) {
            SkASSERT(fTag == kEmpty);
            fKey = key;
            fView = view;
            fTag = kView;
        }

        void makeEmpty() {
            fKey.reset();
            if (fTag == kView) {
                fView.reset();
            } else if (fTag == kVertData) {
                fVertData.reset();
            }
            fTag = kEmpty;
        }

        void set(const GrUniqueKey& key, sk_sp<VertexData> vertData) {
            SkASSERT(fTag == kEmpty);
            fKey = key;
            fVertData = vertData;
            fTag = kVertData;
        }

        // The thread-safe cache gets to directly manipulate the llist and last-access members
        GrStdSteadyClock::time_point fLastAccess;
        SK_DECLARE_INTERNAL_LLIST_INTERFACE(Entry);

        // for SkTDynamicHash
        static const GrUniqueKey& GetKey(const Entry& e) {
            SkASSERT(e.fTag != kEmpty);
            return e.fKey;
        }
        static uint32_t Hash(const GrUniqueKey& key) { return key.hash(); }

    private:
        // Note: the unique key is stored here because it is never attached to a proxy or
        // a GrTexture
        GrUniqueKey fKey;
        union {
            GrSurfaceProxyView fView;
            sk_sp<VertexData>  fVertData;
        };

        enum {
            kEmpty,
            kView,
            kVertData,
        } fTag { kEmpty };
    };

    void makeExistingEntryMRU(Entry*) SK_REQUIRES(fSpinLock);
    Entry* makeNewEntryMRU(Entry*) SK_REQUIRES(fSpinLock);

    Entry* getEntry(const GrUniqueKey&, const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);
    Entry* getEntry(const GrUniqueKey&, sk_sp<VertexData>) SK_REQUIRES(fSpinLock);

    void recycleEntry(Entry*) SK_REQUIRES(fSpinLock);

    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalFind(
            const GrUniqueKey&) SK_REQUIRES(fSpinLock);
    std::tuple<GrSurfaceProxyView, sk_sp<SkData>> internalAdd(
            const GrUniqueKey&, const GrSurfaceProxyView&) SK_REQUIRES(fSpinLock);

    std::tuple<sk_sp<VertexData>, sk_sp<SkData>> internalFindVerts(
            const GrUniqueKey&) SK_REQUIRES(fSpinLock);
    std::tuple<sk_sp<VertexData>, sk_sp<SkData>> internalAddVerts(
            const GrUniqueKey&, sk_sp<VertexData>) SK_REQUIRES(fSpinLock);

    mutable SkSpinlock fSpinLock;

    SkTDynamicHash<Entry, GrUniqueKey> fUniquelyKeyedEntryMap SK_GUARDED_BY(fSpinLock);
    // The head of this list is the MRU
    SkTInternalLList<Entry> fUniquelyKeyedEntryList SK_GUARDED_BY(fSpinLock);

    // TODO: empirically determine this from the skps
    static const int kInitialArenaSize = 64 * sizeof(Entry);

    char fStorage[kInitialArenaSize];
    SkArenaAlloc fEntryAllocator{fStorage, kInitialArenaSize, kInitialArenaSize};
    Entry* fFreeEntryList SK_GUARDED_BY(fSpinLock);
};

#endif // GrThreadSafeCache_DEFINED