/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "GrShape.h"
#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix,
 * the old entry gets evicted.
 */
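//
// A minimal usage sketch (hypothetical caller code, not part of this header; "contextUniqueID",
// "onFlushRP", "shape", and "viewMatrix" are assumed to come from the CCPR on-flush machinery):
//
//     GrCCPathCache cache(contextUniqueID);
//     SkIVector shift;
//     GrCCPathCache::MaskTransform m(viewMatrix, &shift);
//     if (auto ref = cache.find(onFlushRP, shape, m, GrCCPathCache::CreateIfAbsent::kYes)) {
//         // "ref" holds an on-flush ref on the cached entry and releases it when it goes out
//         // of scope.
//     }
//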
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID);
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key& that) const {
            return fDataSizeInBytes == that.fDataSizeInBytes &&
                   !memcmp(this->data(), that.data(), fDataSizeInBytes);
        }

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };
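
    // A sketch of how a Key might be filled in (hypothetical caller code; the real write happens
    // inside GrCCPathCache::find(), and "pathCacheUniqueID" here is an assumed variable):
    //
    //     int dataCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(pathCacheUniqueID, dataCountU32);
    //     shape.writeUnstyledKey(key->data());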

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for those integer
    // shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow after HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
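
    // Illustrative use (hypothetical caller code): the integer part of the translate comes back
    // through "shift", and the caller applies it when positioning the cached mask.
    //
    //     SkIVector shift;
    //     MaskTransform m(viewMatrix, &shift);
    //     SkIRect drawBounds = entry->devIBounds().makeOffset(shift.fX, shift.fY);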

    // Represents a ref on a GrCCPathCacheEntry that should only be used during the current flush.
    class OnFlushEntryRef : SkNoncopyable {
    public:
        static OnFlushEntryRef OnFlushRef(GrCCPathCacheEntry*);
        OnFlushEntryRef() = default;
        OnFlushEntryRef(OnFlushEntryRef&& ref) : fEntry(skstd::exchange(ref.fEntry, nullptr)) {}
        ~OnFlushEntryRef();

        GrCCPathCacheEntry* get() const { return fEntry; }
        GrCCPathCacheEntry* operator->() const { return fEntry; }
        GrCCPathCacheEntry& operator*() const { return *fEntry; }
        explicit operator bool() const { return fEntry; }
        void operator=(OnFlushEntryRef&& ref) { fEntry = skstd::exchange(ref.fEntry, nullptr); }

    private:
        OnFlushEntryRef(GrCCPathCacheEntry* entry) : fEntry(entry) {}
        GrCCPathCacheEntry* fEntry = nullptr;
    };
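
    // OnFlushEntryRef is move-only; a sketch of handing one off (hypothetical caller code, with
    // "fPathCache", "onFlushRP", "shape", and "maskTransform" assumed):
    //
    //     OnFlushEntryRef ref = fPathCache->find(onFlushRP, shape, maskTransform);
    //     OnFlushEntryRef stolen = std::move(ref);  // "ref" is now empty
    //     SkASSERT(!ref && stolen);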

    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    OnFlushEntryRef find(GrOnFlushResourceProvider*, const GrShape&, const MaskTransform&,
                         CreateIfAbsent = CreateIfAbsent::kNo);

    void doPreFlushProcessing();

    void purgeEntriesOlderThan(GrProxyProvider*, const GrStdSteadyClock::time_point& purgeTime);

    // As we evict entries from our local path cache, we accumulate a list of invalidated atlas
    // textures. This call purges the invalidated atlas textures from the mainline GrResourceCache.
    // It is provided in two overloads that take different "provider" objects, to accommodate
    // whatever might be available at the callsite.
    void purgeInvalidatedAtlasTextures(GrOnFlushResourceProvider*);
    void purgeInvalidatedAtlasTextures(GrProxyProvider*);
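
    // One plausible per-flush maintenance sequence (hypothetical; the exact ordering lives in the
    // CCPR on-flush code, not in this header, and "proxyProvider"/"onFlushRP"/"purgeTime" are
    // assumed variables):
    //
    //     cache.doPreFlushProcessing();
    //     // ... cache.find(...) for each path being drawn this flush ...
    //     cache.purgeEntriesOlderThan(proxyProvider, purgeTime);
    //     cache.purgeInvalidatedAtlasTextures(onFlushRP);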

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        inline static uint32_t Hash(const Key& key) {
            return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
        }

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        void operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    GrStdSteadyClock::time_point quickPerFlushTimestamp() {
        // time_point::min() means it's time to update fPerFlushTimestamp with a newer clock read.
        if (GrStdSteadyClock::time_point::min() == fPerFlushTimestamp) {
            fPerFlushTimestamp = GrStdSteadyClock::now();
        }
        return fPerFlushTimestamp;
    }

    void evict(const GrCCPathCache::Key&, GrCCPathCacheEntry* = nullptr);

    // Evicts all the cache entries whose keys have been queued up in fInvalidatedKeysInbox via
    // SkPath listeners.
    void evictInvalidatedCacheKeys();

    const uint32_t fContextUniqueID;

    SkTHashTable<HashNode, const Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.

    // We only read the clock once per flush and cache the value in this variable. This prevents
    // excessive clock reads for cache timestamps, which might degrade performance.
    GrStdSteadyClock::time_point fPerFlushTimestamp = GrStdSteadyClock::time_point::min();

    // As we evict entries from our local path cache, we accumulate lists of invalidated atlas
    // textures in these two members. We hold these until we purge them from the GrResourceCache
    // (e.g. via purgeInvalidatedAtlasTextures().)
    SkSTArray<4, sk_sp<GrTextureProxy>> fInvalidatedProxies;
    SkSTArray<4, GrUniqueKey> fInvalidatedProxyUniqueKeys;

    friend class GrCCCachedAtlas;  // To append to fInvalidatedProxies, fInvalidatedProxyUniqueKeys.

public:
    const SkTHashTable<HashNode, const Key&>& testingOnly_getHashTable() const;
    const SkTInternalLList<GrCCPathCacheEntry>& testingOnly_getLRU() const;
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(this->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
        SkASSERT(!fCachedAtlas);
        SkASSERT(0 == fOnFlushRefCnt);
    }

    const GrCCPathCache::Key& cacheKey() const { SkASSERT(fCacheKey); return *fCacheKey; }

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }

    const GrCCCachedAtlas* cachedAtlas() const { return fCachedAtlas.get(); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    enum class ReleaseAtlasResult : bool {
        kNone,
        kDidInvalidateFromCache
    };

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void setCoverageCountAtlas(GrOnFlushResourceProvider*, GrCCAtlas*, const SkIVector& atlasOffset,
                               const SkRect& devBounds, const SkRect& devBounds45,
                               const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the GrCCCachedAtlas data.
    ReleaseAtlasResult upgradeToLiteralCoverageAtlas(GrCCPathCache*, GrOnFlushResourceProvider*,
                                                     GrCCAtlas*, const SkIVector& newAtlasOffset);
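
    // A rough lifecycle sketch (hypothetical; the real call sites live in the CCPR on-flush code,
    // and the argument names here are placeholders):
    //
    //     // Flush N: the path is rendered into the fp16 coverage-count atlas.
    //     entry->setCoverageCountAtlas(onFlushRP, ccAtlas, atlasOffset, devBounds, devBounds45,
    //                                  devIBounds, maskShift);
    //     // Flush N+1: if the path was reused, its mask is copied into a permanent 8-bit atlas.
    //     entry->upgradeToLiteralCoverageAtlas(pathCache, onFlushRP, a8Atlas, newAtlasOffset);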

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    bool hasBeenEvicted() const { return fCacheKey->shouldUnregisterFromPath(); }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    ReleaseAtlasResult releaseCachedAtlas(GrCCPathCache*);

    sk_sp<GrCCPathCache::Key> fCacheKey;
    GrStdSteadyClock::time_point fTimestamp;
    int fHitCount = 0;

    sk_sp<GrCCCachedAtlas> fCachedAtlas;
    SkIVector fAtlasOffset;

    MaskTransform fMaskTransform;
    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    int fOnFlushRefCnt = 0;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 GrColor, DoEvenOddFill);  // To access data.

public:
    int testingOnly_peekOnFlushRefCnt() const;
};

/**
 * Encapsulates the data for an atlas whose texture is stored in the mainline GrResourceCache. Many
 * instances of GrCCPathCacheEntry will reference the same GrCCCachedAtlas.
 *
 * We use this object to track the percentage of the original atlas pixels that could still
 * potentially be reused (i.e., those which still represent an extant path). When the percentage of
 * useful pixels drops below 50%, we purge the entire texture from the resource cache.
 *
 * This object also holds a ref on the atlas's actual texture proxy during flush. When
 * fOnFlushRefCnt decrements back down to zero, we release fOnFlushProxy and reset it back to null.
 */
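//
// The purge heuristic described above, sketched in pseudocode (the actual logic lives in
// invalidatePathPixels(); this illustrates the documented 50% threshold and is not a quote of the
// implementation):
//
//     fNumInvalidatedPathPixels += numPixels;
//     if (fNumInvalidatedPathPixels >= fNumPathPixels / 2) {
//         // Remove fTextureKey from the GrResourceCache and report it to the path cache.
//         return ReleaseAtlasResult::kDidInvalidateFromCache;
//     }
//     return ReleaseAtlasResult::kNone;
//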
class GrCCCachedAtlas : public GrNonAtomicRef<GrCCCachedAtlas> {
public:
    using ReleaseAtlasResult = GrCCPathCacheEntry::ReleaseAtlasResult;

    GrCCCachedAtlas(GrCCAtlas::CoverageType type, const GrUniqueKey& textureKey,
                    sk_sp<GrTextureProxy> onFlushProxy)
            : fCoverageType(type)
            , fTextureKey(textureKey)
            , fOnFlushProxy(std::move(onFlushProxy)) {}

    ~GrCCCachedAtlas() {
        SkASSERT(!fOnFlushProxy);
        SkASSERT(!fOnFlushRefCnt);
    }

    GrCCAtlas::CoverageType coverageType() const { return fCoverageType; }
    const GrUniqueKey& textureKey() const { return fTextureKey; }

    GrTextureProxy* getOnFlushProxy() const { return fOnFlushProxy.get(); }

    void setOnFlushProxy(sk_sp<GrTextureProxy> proxy) {
        SkASSERT(!fOnFlushProxy);
        fOnFlushProxy = std::move(proxy);
    }

    void addPathPixels(int numPixels) { fNumPathPixels += numPixels; }
    ReleaseAtlasResult invalidatePathPixels(GrCCPathCache*, int numPixels);

    int peekOnFlushRefCnt() const { return fOnFlushRefCnt; }
    void incrOnFlushRefCnt(int count = 1) const {
        SkASSERT(count > 0);
        SkASSERT(fOnFlushProxy);
        fOnFlushRefCnt += count;
    }
    void decrOnFlushRefCnt(int count = 1) const;

private:
    const GrCCAtlas::CoverageType fCoverageType;
    const GrUniqueKey fTextureKey;

    int fNumPathPixels = 0;
    int fNumInvalidatedPathPixels = 0;
    bool fIsInvalidatedFromResourceCache = false;

    mutable sk_sp<GrTextureProxy> fOnFlushProxy;
    mutable int fOnFlushRefCnt = 0;

public:
    int testingOnly_peekOnFlushRefCnt() const;
};


inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    // Register the key as a gen-ID change listener so its onChange() fires when the path is
    // modified or deleted.
    shape.addGenIDChangeListener(std::move(key));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline GrCCPathCache::HashNode::~HashNode() {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
}

inline void GrCCPathCache::HashNode::operator=(HashNode&& node) {
    SkASSERT(!fEntry || fEntry->hasBeenEvicted());  // Should have called GrCCPathCache::evict().
    fEntry = skstd::exchange(node.fEntry, nullptr);
}

inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, GrColor color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif