/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix,
 * the old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache();
    ~GrCCPathCache();

    class Key : public SkPathRef::GenIDChangeListener {
    public:
        static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
                               const void* data = nullptr);

        uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

        int dataSizeInBytes() const { return fDataSizeInBytes; }
        const uint32_t* data() const;

        void resetDataCountU32(int dataCountU32) {
            SkASSERT(dataCountU32 <= fDataReserveCountU32);
            fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
        }
        uint32_t* data();

        bool operator==(const Key&) const;

        // Called when our corresponding path is modified or deleted. Not threadsafe.
        void onChange() override;

    private:
        Key(uint32_t pathCacheUniqueID, int dataCountU32)
                : fPathCacheUniqueID(pathCacheUniqueID)
                , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
                SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
            SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
        }

        const uint32_t fPathCacheUniqueID;
        int fDataSizeInBytes;
        SkDEBUGCODE(const int fDataReserveCountU32);
        // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
        // provides access to it.
    };

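    // Example of building a key for a shape (a sketch only; the real flow, including any extra
    // data appended after the unstyled key, lives in GrCCPathCache.cpp). 'shape' and 'pathCacheID'
    // are assumed to be a GrShape and this cache's unique ID:
    //
    //     int dataCountU32 = shape.unstyledKeySize();
    //     sk_sp<Key> key = Key::Make(pathCacheID, dataCountU32);
    //     shape.writeUnstyledKey(key->data());  // Fills the variable-length footer.
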
    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for those integer
    // shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI's lead and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };

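    // A minimal sketch of the intended decomposition (illustrative only; the real constructor is
    // defined in GrCCPathCache.cpp and may differ in details such as the rounding rule):
    //
    //     SkIVector shift;
    //     shift.fX = SkScalarFloorToInt(m.getTranslateX());
    //     shift.fY = SkScalarFloorToInt(m.getTranslateY());
    //     float subpixelX = m.getTranslateX() - shift.fX;   // -> fSubpixelTranslate[0]
    //     float subpixelY = m.getTranslateY() - shift.fY;   // -> fSubpixelTranslate[1]
    //     float matrix2x2[4] = {m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()};
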
    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
                                   CreateIfAbsent = CreateIfAbsent::kNo);

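    // Typical lookup during a draw (a sketch, not part of the API; assumes a GrCCPathCache
    // 'pathCache', a GrShape 'shape', and its view matrix 'viewMatrix'):
    //
    //     SkIVector maskShift;
    //     GrCCPathCache::MaskTransform m(viewMatrix, &maskShift);
    //     sk_sp<GrCCPathCacheEntry> entry =
    //             pathCache.find(shape, m, GrCCPathCache::CreateIfAbsent::kYes);
    //     if (entry) {
    //         // Draw from the cached mask, applying 'maskShift' as an integer translate.
    //     }
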
    void purgeAsNeeded();

private:
    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static const Key& GetKey(const HashNode&);
        static uint32_t Hash(const Key&);

        HashNode() = default;
        HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        HashNode& operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        void willExitHashTable();

        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
    };

    void evict(const GrCCPathCache::Key& key) {
        fHashTable.remove(key);  // HashNode::willExitHashTable() takes care of the rest.
    }

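    // Sketch of how invalidation is expected to flow (illustrative; the real logic lives in
    // GrCCPathCache.cpp): Key::onChange() posts the invalidated key to a message bus, and
    // purgeAsNeeded() drains fInvalidatedKeysInbox and evicts the matching entries:
    //
    //     void GrCCPathCache::purgeAsNeeded() {
    //         SkTArray<sk_sp<Key>> invalidatedKeys;
    //         fInvalidatedKeysInbox.poll(&invalidatedKeys);
    //         for (const sk_sp<Key>& key : invalidatedKeys) {
    //             this->evict(*key);
    //         }
    //     }
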
    SkTHashTable<HashNode, const GrCCPathCache::Key&> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
    sk_sp<Key> fScratchKey;  // Reused for creating a temporary key in the find() method.
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() {
        SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.
        this->invalidateAtlas();
    }

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
    // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
    bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void initAsStashedAtlas(const GrUniqueKey& atlasKey, const SkIVector& atlasOffset,
                            const SkRect& devBounds, const SkRect& devBounds45,
                            const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the CachedAtlasInfo data.
    void updateToCachedAtlas(const GrUniqueKey& atlasKey, const SkIVector& newAtlasOffset,
                             sk_sp<GrCCAtlas::CachedAtlasInfo>);

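    // Typical lifecycle across two flushes (a sketch; the argument names are illustrative and the
    // real call sites are in the CCPR rendering code):
    //
    //     // Flush N: the path was just rendered into the fp16 coverage-count atlas.
    //     entry->initAsStashedAtlas(stashedAtlasKey, atlasOffset, devBounds, devBounds45,
    //                               devIBounds, maskShift);
    //
    //     // Flush N+1: the reused mask was copied into a permanent 8-bit atlas.
    //     entry->updateToCachedAtlas(cachedAtlasKey, newAtlasOffset, std::move(atlasInfo));
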
    const GrUniqueKey& atlasKey() const { return fAtlasKey; }

    void resetAtlasKeyAndInfo() {
        fAtlasKey.reset();
        fCachedAtlasInfo.reset();
    }

    // This is a utility for the caller to detect when a path gets drawn more than once during the
    // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check
    // here to see if they have already placed the path earlier in the same flush. The caller is
    // required to reset all currFlushAtlas references back to null before any subsequent flush.
    void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) {
        // This should not get called more than once in a single flush. Once fCurrFlushAtlas is
        // non-null, it can only be set back to null (once the flush is over).
        SkASSERT(!fCurrFlushAtlas || !currFlushAtlas);
        fCurrFlushAtlas = currFlushAtlas;
    }
    const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; }

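    // Example of the intended per-flush pattern (a sketch; 'entry' and 'atlas' are assumed to be
    // the current GrCCPathCacheEntry and the atlas being filled this flush):
    //
    //     if (const GrCCAtlas* reused = entry->currFlushAtlas()) {
    //         // The path is already in 'reused' this flush; draw from there.
    //     } else {
    //         entry->setCurrFlushAtlas(atlas);  // First draw this flush; remember the atlas.
    //     }
    //     ...
    //     entry->setCurrFlushAtlas(nullptr);  // Required before the next flush begins.
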
private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
            : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
    }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    void invalidateAtlas();

    sk_sp<GrCCPathCache::Key> fCacheKey;

    MaskTransform fMaskTransform;
    int fHitCount = 1;

    GrUniqueKey fAtlasKey;
    SkIVector fAtlasOffset;

    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
    sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;

    // This field is for when a path gets drawn more than once during the same flush.
    const GrCCAtlas* fCurrFlushAtlas = nullptr;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 GrColor, DoEvenOddFill);  // To access data.
};

inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, GrColor color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif