/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkMessageBus.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID) : fInvalidatedEntriesInbox(contextUniqueID) {}
    SkDEBUGCODE(~GrCCPathCache();)

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for applying the
    // returned integer shift.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
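    // For illustration only, the decomposition amounts to something like the following sketch.
    // (The actual constructor is defined in the .cpp file, and its rounding behavior is
    // authoritative.)
    //
    //     SkIVector shift = {SkScalarFloorToInt(m.getTranslateX()),
    //                        SkScalarFloorToInt(m.getTranslateY())};
    //     float subpixelTranslate[2] = {m.getTranslateX() - (float)shift.fX,
    //                                   m.getTranslateY() - (float)shift.fY};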

    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
                                   CreateIfAbsent = CreateIfAbsent::kNo);
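    //
    // A typical lookup, as a sketch (the caller-side names here are hypothetical):
    //
    //     SkIVector shift;
    //     GrCCPathCache::MaskTransform m(viewMatrix, &shift);
    //     if (sk_sp<GrCCPathCacheEntry> entry =
    //                 cache->find(shape, m, GrCCPathCache::CreateIfAbsent::kYes)) {
    //         // entry->hitCount() > 1 means this mask has been reused and may be worth
    //         // copying into a permanent 8-bit atlas.
    //     }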

    // Removes an entry from the LRU list and hash table, dropping the cache's reference.
    void evict(GrCCPathCacheEntry*);

    // Drains fInvalidatedEntriesInbox, evicting any entries whose paths have since been modified
    // or deleted.
    void purgeAsNeeded();

private:
    // Wrapper around a raw GrShape key that has a specialized operator==. Used by the hash table.
    struct HashKey {
        const uint32_t* fData;
    };
    friend bool operator==(const HashKey&, const HashKey&);

    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry.
    class HashNode : SkNoncopyable {
    public:
        static HashKey GetKey(const HashNode& node) { return GetKey(node.entry()); }
        static HashKey GetKey(const GrCCPathCacheEntry*);
        static uint32_t Hash(HashKey);

        HashNode() = default;
        HashNode(uint32_t pathCacheUniqueID, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node) : fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        HashNode& operator=(HashNode&& node) {
            fEntry = std::move(node.fEntry);
            SkASSERT(!node.fEntry);
            return *this;
        }

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        sk_sp<GrCCPathCacheEntry> fEntry;
        // The GrShape's unstyled key is stored as a variable-length footer to the 'fEntry'
        // allocation. GetKey provides access to it.
    };

    SkTHashTable<HashNode, HashKey> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<GrCCPathCacheEntry>>::Inbox fInvalidatedEntriesInbox;
};
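
// Eviction works through SkMessageBus: each GrCCPathCacheEntry is a SkPathRef::GenIDChangeListener,
// so when a cached path is modified or deleted, the entry is posted to the inbox above and the
// cache evicts it on the next call to purgeAsNeeded(). A caller's per-flush housekeeping might
// look roughly like this (a sketch; 'pathCache' is hypothetical):
//
//     pathCache->purgeAsNeeded();  // Drop entries whose paths were modified or deleted.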

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public SkPathRef::GenIDChangeListener {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() override;

    uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
    // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
    bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void initAsStashedAtlas(const GrUniqueKey& atlasKey, const SkIVector& atlasOffset,
                            const SkRect& devBounds, const SkRect& devBounds45,
                            const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the CachedAtlasInfo data.
    void updateToCachedAtlas(const GrUniqueKey& atlasKey, const SkIVector& newAtlasOffset,
                             sk_sp<GrCCAtlas::CachedAtlasInfo>);
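    //
    // The intended flow across two flushes, as a sketch (variable names hypothetical):
    //
    //     // Flush N: the path is rendered into the mainline fp16 coverage-count atlas.
    //     entry->initAsStashedAtlas(stashedAtlasKey, atlasOffset, devBounds, devBounds45,
    //                               devIBounds, maskShift);
    //
    //     // Flush N+1: the path got reused, so its mask is copied into a permanent 8-bit atlas.
    //     entry->updateToCachedAtlas(permanentAtlasKey, newAtlasOffset, std::move(atlasInfo));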
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 137 | |
| 138 | const GrUniqueKey& atlasKey() const { return fAtlasKey; } |
| 139 | |
| 140 | void resetAtlasKeyAndInfo() { |
| 141 | fAtlasKey.reset(); |
| 142 | fCachedAtlasInfo.reset(); |
| 143 | } |
| 144 | |
| 145 | // This is a utility for the caller to detect when a path gets drawn more than once during the |
| 146 | // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check |
| 147 | // here to see if they have already placed the path previously during the same flush. The caller |
| 148 | // is required to reset all currFlushAtlas references back to null before any subsequent flush. |
| 149 | void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) { |
| 150 | // This should not get called more than once in a single flush. Once fCurrFlushAtlas is |
| 151 | // non-null, it can only be set back to null (once the flush is over). |
| 152 | SkASSERT(!fCurrFlushAtlas || !currFlushAtlas); |
| 153 | fCurrFlushAtlas = currFlushAtlas; |
| 154 | } |
| 155 | const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; } |
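    //
    // For example (a sketch; 'atlas' stands in for whatever atlas the caller is currently
    // filling):
    //
    //     if (const GrCCAtlas* retiredAtlas = entry->currFlushAtlas()) {
    //         // Already placed earlier in this same flush; reuse that atlas location.
    //     } else {
    //         entry->setCurrFlushAtlas(atlas);
    //     }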

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(uint32_t pathCacheUniqueID, const MaskTransform& maskTransform)
            : fPathCacheUniqueID(pathCacheUniqueID), fMaskTransform(maskTransform) {
        SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
    }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    void invalidateAtlas();

    // Called when our corresponding path is modified or deleted. Not threadsafe.
    void onChange() override;

    const uint32_t fPathCacheUniqueID;
    MaskTransform fMaskTransform;
    int fHitCount = 1;

    GrUniqueKey fAtlasKey;
    SkIVector fAtlasOffset;

    // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
    sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;

    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    // This field is for when a path gets drawn more than once during the same flush.
    const GrCCAtlas* fCurrFlushAtlas = nullptr;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint32_t, DoEvenOddFill);  // To access data.
};

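// Fills out a path-instance record from a cached entry: the cached device-space bounds get offset
// by the caller's integer 'shift', and the same shift is subtracted from the atlas offset, so the
// instance still samples the mask at its original atlas location.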
inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, GrColor color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif