/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "include/private/SkTHash.h"
#include "src/core/SkExchange.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/ccpr/GrCCAtlas.h"
#include "src/gpu/ccpr/GrCCPathProcessor.h"
#include "src/gpu/geometry/GrShape.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix,
 * the old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

        // TODO(b/30449950): use sized delete once P0722R3 is available
        static void operator delete(void* p);

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };
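
    // A minimal sketch of how a Key's footer might be filled from a shape's unstyled key
    // (illustrative only; assumes GrShape::unstyledKeySize()/writeUnstyledKey() and is not
    // necessarily how find() builds its keys):
    //
    //     int keyCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(pathCacheUniqueID, keyCountU32);
    //     shape.writeUnstyledKey(key->data());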

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for applying those
    // integer shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
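
    // For example (illustrative only; the exact rounding convention is an implementation
    // detail): a view matrix translate of (10.75, -3.25) might come back as *shift = {10, -3},
    // with only the leftover subpixel portion (0.75, -0.25) kept in fSubpixelTranslate on
    // non-AOSP builds.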

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(skstd::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = skstd::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };

    // Finds an entry in the cache that matches the given shape and transformation matrix.
    // 'maskShift' is filled with an integer post-translate that the caller must apply when
    // drawing the entry's mask to the device.
    //
    // NOTE: Shapes are only given one entry, so any time they are accessed with a new
    // transformation, the old entry gets evicted.
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrShape&,
                         const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix,
                         SkIVector* maskShift);
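
    // A rough usage sketch (hypothetical call site; 'onFlushRP', 'shape', 'clippedDrawBounds',
    // and 'viewMatrix' are assumed to come from the caller):
    //
    //     SkIVector maskShift;
    //     if (auto entry = pathCache->find(onFlushRP, shape, clippedDrawBounds, viewMatrix,
    //                                      &maskShift)) {
    //         // Draw entry->cachedAtlas(), offset by the integer post-translate 'maskShift'.
    //     }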

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline
    // GrResourceCache. It is available with two different "provider" objects, to accommodate
    // whatever might be available at the callsite.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush, and cache it in this variable. This prevents
    // excessive clock reads for cache timestamps, which could degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g. via purgeInvalidatedAtlasTextures()).
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};
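
// A rough sketch of how these pieces fit together over a flush (the exact call ordering is
// driven by the owning path renderer and is an assumption here, not something this header
// guarantees):
//
//     pathCache.doPreFlushProcessing();           // evict entries whose SkPaths were invalidated
//     ... pathCache.find(...) for each cached draw ...
//     pathCache.purgeInvalidatedAtlasTextures(onFlushRP);  // flush accumulated atlas evictions
//     pathCache.purgeEntriesOlderThan(proxyProvider, purgeTime);  // optional age-based trimming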

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of flushes during which this specific entry (path + matrix combination) has been
    // pulled from the path cache. If a path is pulled from the cache more than once in a single
    // flush, the hit count is only incremented once.
    //
    // If the entry did not previously exist, its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // The cumulative region of the path that has been drawn during the lifetime of this cache
    // entry (as defined by the 'clippedDrawBounds' parameter for GrCCPathCache::find).
    const SkIRect& hitRect() const { return fHitRect; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void setCoverageCountAtlas(
            GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
            const GrOctoBounds& octoBounds, const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);
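
    // Taken together, the two calls above describe a mask's lifecycle: it is first rendered into
    // a transient fp16 coverage-count atlas (setCoverageCountAtlas), then, if the path is reused
    // in a later flush, copied into a permanent 8-bit atlas (upgradeToLiteralCoverageAtlas).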

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldUnregisterFromPath(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;
    SkIRect fHitRect = SkIRect::MakeEmpty();

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    GrOctoBounds fOctoBounds;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint64_t color, GrFillRule);  // To access data.

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache. Many
 * instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still ever
 * potentially be reused (i.e., those which still represent an extant path). When the percentage
 * of useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to null.
 */
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);
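
    // A sketch of the "useful pixels" bookkeeping described in the class comment (an assumption;
    // the real accounting inside invalidatePathPixels() may differ in detail):
    //
    //     fNumInvalidatedPathPixels += numPixels;
    //     if (fNumInvalidatedPathPixels >= fNumPathPixels / 2) {
    //         // Less than half the atlas is still useful: purge its texture from the
    //         // GrResourceCache and return kDidInvalidateFromCache.
    //     }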

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = skstd::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(
        const GrCCPathCacheEntry& entry, const SkIVector& shift, uint64_t color,
        GrFillRule fillRule) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fOctoBounds.makeOffset(dx, dy), entry.fAtlasOffset - shift, color, fillRule);
}

#endif