/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
25class GrCCPathCache {
26public:
Chris Dalton8429c792018-10-23 15:56:22 -060027 GrCCPathCache();
Chris Dalton3b572792018-10-23 18:26:20 -060028 ~GrCCPathCache();
Chris Dalton4da70192018-06-18 09:51:36 -060029
Chris Dalton9985a272018-10-30 14:29:39 -060030 class Key : public SkPathRef::GenIDChangeListener {
31 public:
32 static sk_sp<Key> Make(uint32_t pathCacheUniqueID, int dataCountU32,
33 const void* data = nullptr);
34
35 uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }
36
37 int dataSizeInBytes() const { return fDataSizeInBytes; }
38 const uint32_t* data() const;
39
40 void resetDataCountU32(int dataCountU32) {
41 SkASSERT(dataCountU32 <= fDataReserveCountU32);
42 fDataSizeInBytes = dataCountU32 * sizeof(uint32_t);
43 }
44 uint32_t* data();
45
46 bool operator==(const Key&) const;
47
48 // Called when our corresponding path is modified or deleted. Not threadsafe.
49 void onChange() override;
50
51 private:
52 Key(uint32_t pathCacheUniqueID, int dataCountU32)
53 : fPathCacheUniqueID(pathCacheUniqueID)
54 , fDataSizeInBytes(dataCountU32 * sizeof(uint32_t))
55 SkDEBUGCODE(, fDataReserveCountU32(dataCountU32)) {
56 SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
57 }
58
59 const uint32_t fPathCacheUniqueID;
60 int fDataSizeInBytes;
61 SkDEBUGCODE(const int fDataReserveCountU32);
62 // The GrShape's unstyled key is stored as a variable-length footer to this class. GetKey
63 // provides access to it.
64 };
65
Chris Dalton4da70192018-06-18 09:51:36 -060066 // Stores the components of a transformation that affect a path mask (i.e. everything but
67 // integer translation). During construction, any integer portions of the matrix's translate are
68 // shaved off and returned to the caller. The caller is responsible for those integer shifts.
69 struct MaskTransform {
70 MaskTransform(const SkMatrix& m, SkIVector* shift);
71 float fMatrix2x2[4];
Chris Dalton644341a2018-06-18 19:14:16 -060072#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
73 // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
74 // On AOSP we follow after HWUI and ignore the subpixel translate.
Chris Dalton4da70192018-06-18 09:51:36 -060075 float fSubpixelTranslate[2];
Chris Dalton644341a2018-06-18 19:14:16 -060076#endif
Chris Dalton4da70192018-06-18 09:51:36 -060077 };
78
79 enum class CreateIfAbsent : bool {
80 kNo = false,
81 kYes = true
82 };
83
84 // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
85 // with a different MaskTransform, the old entry gets evicted.
86 sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
87 CreateIfAbsent = CreateIfAbsent::kNo);
88
Chris Dalton9a986cf2018-10-18 15:27:59 -060089 void purgeAsNeeded();
Chris Dalton4da70192018-06-18 09:51:36 -060090
91private:
Chris Dalton9a986cf2018-10-18 15:27:59 -060092 // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
93 // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
Chris Dalton3b572792018-10-23 18:26:20 -060094 // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
95 // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
Chris Dalton4da70192018-06-18 09:51:36 -060096 class HashNode : SkNoncopyable {
97 public:
Chris Dalton9985a272018-10-30 14:29:39 -060098 static const Key& GetKey(const HashNode&);
99 static uint32_t Hash(const Key&);
Chris Dalton4da70192018-06-18 09:51:36 -0600100
101 HashNode() = default;
Chris Dalton9985a272018-10-30 14:29:39 -0600102 HashNode(GrCCPathCache*, sk_sp<Key>, const MaskTransform&, const GrShape&);
Chris Dalton3b572792018-10-23 18:26:20 -0600103 HashNode(HashNode&& node)
104 : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
Chris Dalton9a986cf2018-10-18 15:27:59 -0600105 SkASSERT(!node.fEntry);
106 }
Chris Dalton4da70192018-06-18 09:51:36 -0600107
Chris Dalton3b572792018-10-23 18:26:20 -0600108 ~HashNode();
109
110 HashNode& operator=(HashNode&& node);
Chris Dalton4da70192018-06-18 09:51:36 -0600111
Chris Dalton9a986cf2018-10-18 15:27:59 -0600112 GrCCPathCacheEntry* entry() const { return fEntry.get(); }
Chris Dalton4da70192018-06-18 09:51:36 -0600113
114 private:
Chris Dalton3b572792018-10-23 18:26:20 -0600115 void willExitHashTable();
116
117 GrCCPathCache* fPathCache = nullptr;
Chris Dalton9a986cf2018-10-18 15:27:59 -0600118 sk_sp<GrCCPathCacheEntry> fEntry;
Chris Dalton4da70192018-06-18 09:51:36 -0600119 };
120
Chris Dalton9985a272018-10-30 14:29:39 -0600121 void evict(const GrCCPathCache::Key& key) {
122 fHashTable.remove(key); // HashNode::willExitHashTable() takes care of the rest.
123 }
Chris Dalton3b572792018-10-23 18:26:20 -0600124
Chris Dalton9985a272018-10-30 14:29:39 -0600125 SkTHashTable<HashNode, const GrCCPathCache::Key&> fHashTable;
126 SkTInternalLList<GrCCPathCacheEntry> fLRU;
127 SkMessageBus<sk_sp<Key>>::Inbox fInvalidatedKeysInbox;
128 sk_sp<Key> fScratchKey; // Reused for creating a temporary key in the find() method.
Chris Dalton4da70192018-06-18 09:51:36 -0600129};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
Chris Dalton9985a272018-10-30 14:29:39 -0600135class GrCCPathCacheEntry : public GrNonAtomicRef<GrCCPathCacheEntry> {
Chris Dalton4da70192018-06-18 09:51:36 -0600136public:
137 SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);
138
Chris Dalton9985a272018-10-30 14:29:39 -0600139 ~GrCCPathCacheEntry() {
140 SkASSERT(!fCurrFlushAtlas); // Client is required to reset fCurrFlushAtlas back to null.
141 this->invalidateAtlas();
142 }
Chris Dalton9a986cf2018-10-18 15:27:59 -0600143
Chris Daltona8429cf2018-06-22 11:43:31 -0600144 // The number of times this specific entry (path + matrix combination) has been pulled from
145 // the path cache. As long as the caller does exactly one lookup per draw, this translates to
146 // the number of times the path has been drawn with a compatible matrix.
147 //
148 // If the entry did not previously exist and was created during
149 // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
150 int hitCount() const { return fHitCount; }
151
Chris Dalton4da70192018-06-18 09:51:36 -0600152 // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
153 // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
154 bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }
155
156 const SkIRect& devIBounds() const { return fDevIBounds; }
157 int width() const { return fDevIBounds.width(); }
158 int height() const { return fDevIBounds.height(); }
159
160 // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
161 // The caller will stash this atlas texture away after drawing, and during the next flush,
162 // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
Chris Dalton9a986cf2018-10-18 15:27:59 -0600163 void initAsStashedAtlas(const GrUniqueKey& atlasKey, const SkIVector& atlasOffset,
164 const SkRect& devBounds, const SkRect& devBounds45,
165 const SkIRect& devIBounds, const SkIVector& maskShift);
Chris Dalton4da70192018-06-18 09:51:36 -0600166
167 // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
168 // the entry at the new atlas and updates the CachedAtlasInfo data.
Chris Dalton9a986cf2018-10-18 15:27:59 -0600169 void updateToCachedAtlas(const GrUniqueKey& atlasKey, const SkIVector& newAtlasOffset,
170 sk_sp<GrCCAtlas::CachedAtlasInfo>);
Chris Dalton4da70192018-06-18 09:51:36 -0600171
172 const GrUniqueKey& atlasKey() const { return fAtlasKey; }
173
174 void resetAtlasKeyAndInfo() {
175 fAtlasKey.reset();
176 fCachedAtlasInfo.reset();
177 }
178
179 // This is a utility for the caller to detect when a path gets drawn more than once during the
180 // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check
181 // here to see if they have already placed the path previously during the same flush. The caller
182 // is required to reset all currFlushAtlas references back to null before any subsequent flush.
183 void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) {
184 // This should not get called more than once in a single flush. Once fCurrFlushAtlas is
185 // non-null, it can only be set back to null (once the flush is over).
186 SkASSERT(!fCurrFlushAtlas || !currFlushAtlas);
187 fCurrFlushAtlas = currFlushAtlas;
188 }
189 const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; }
190
191private:
192 using MaskTransform = GrCCPathCache::MaskTransform;
193
Chris Dalton9985a272018-10-30 14:29:39 -0600194 GrCCPathCacheEntry(sk_sp<GrCCPathCache::Key> cacheKey, const MaskTransform& maskTransform)
195 : fCacheKey(std::move(cacheKey)), fMaskTransform(maskTransform) {
196 }
Chris Dalton4da70192018-06-18 09:51:36 -0600197
Chris Dalton907102e2018-06-29 13:18:53 -0600198 // Resets this entry back to not having an atlas, and purges its previous atlas texture from the
199 // resource cache if needed.
200 void invalidateAtlas();
201
Chris Dalton9985a272018-10-30 14:29:39 -0600202 sk_sp<GrCCPathCache::Key> fCacheKey;
Chris Dalton4da70192018-06-18 09:51:36 -0600203
Chris Dalton907102e2018-06-29 13:18:53 -0600204 MaskTransform fMaskTransform;
Chris Daltona8429cf2018-06-22 11:43:31 -0600205 int fHitCount = 1;
Chris Dalton4da70192018-06-18 09:51:36 -0600206
207 GrUniqueKey fAtlasKey;
208 SkIVector fAtlasOffset;
209
Chris Dalton4da70192018-06-18 09:51:36 -0600210 SkRect fDevBounds;
211 SkRect fDevBounds45;
212 SkIRect fDevIBounds;
213
Chris Dalton3b572792018-10-23 18:26:20 -0600214 // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
215 sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;
216
Chris Dalton4da70192018-06-18 09:51:36 -0600217 // This field is for when a path gets drawn more than once during the same flush.
218 const GrCCAtlas* fCurrFlushAtlas = nullptr;
219
Chris Dalton4da70192018-06-18 09:51:36 -0600220 friend class GrCCPathCache;
221 friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
Brian Osman1be2b7c2018-10-29 16:07:15 -0400222 GrColor, DoEvenOddFill); // To access data.
Chris Dalton4da70192018-06-18 09:51:36 -0600223};
224
225inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
226 const SkIVector& shift, GrColor color,
227 DoEvenOddFill doEvenOddFill) {
228 float dx = (float)shift.fX, dy = (float)shift.fY;
229 this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
230 entry.fAtlasOffset - shift, color, doEvenOddFill);
231}

#endif