/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix, the
 * old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache();
    ~GrCCPathCache();

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate are
    // shaved off and returned to the caller. The caller is responsible for those integer shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        // Upper-left 2x2 of the view matrix (scale/rotate/skew), row-major.
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow after HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };

    // Strongly-typed flag for find(): controls whether a cache miss inserts a new entry.
    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
                                   CreateIfAbsent = CreateIfAbsent::kNo);

    // Removes the given entry from both the hash table and the LRU list.
    void evict(GrCCPathCacheEntry*);

    // Processes any pending invalidation messages and evicts stale entries.
    // NOTE(review): exact purge policy lives in the .cpp — confirm before relying on it.
    void purgeAsNeeded();

private:
    // Wrapper around a raw GrShape key that has a specialized operator==. Used by the hash table.
    struct HashKey {
        const uint32_t* fData;
    };
    friend bool operator==(const HashKey&, const HashKey&);

    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry. Also, when a HashNode goes out of scope, that means it is exiting
    // the hash table. We take that opportunity to remove it from the LRU list and do some cleanup.
    class HashNode : SkNoncopyable {
    public:
        static HashKey GetKey(const HashNode& node) { return GetKey(node.entry()); }
        static HashKey GetKey(const GrCCPathCacheEntry*);
        static uint32_t Hash(HashKey);

        HashNode() = default;
        HashNode(GrCCPathCache*, const MaskTransform&, const GrShape&);
        // Move ctor steals the ref; the moved-from node must be left empty (asserted below).
        HashNode(HashNode&& node)
                : fPathCache(node.fPathCache), fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        ~HashNode();

        HashNode& operator=(HashNode&& node);

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        // Shared cleanup for dtor/assignment: unlinks the entry from the owning cache's LRU list.
        void willExitHashTable();

        GrCCPathCache* fPathCache = nullptr;
        sk_sp<GrCCPathCacheEntry> fEntry;
        // The GrShape's unstyled key is stored as a variable-length footer to the 'fEntry'
        // allocation. GetKey provides access to it.
    };

    SkTHashTable<HashNode, HashKey> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    // Receives entries whose underlying SkPath was modified/deleted (see
    // GrCCPathCacheEntry::onChange), so they can be evicted on the owning thread.
    SkMessageBus<sk_sp<GrCCPathCacheEntry>>::Inbox fInvalidatedEntriesInbox;

    // Debug-only: the cache is not threadsafe; records the thread it may be used on.
    SkDEBUGCODE(SkThreadID fGraphicsThreadID);
};
102
103/**
Chris Daltona8429cf2018-06-22 11:43:31 -0600104 * This class stores all the data necessary to draw a specific path + matrix combination from their
105 * corresponding cached atlas.
Chris Dalton4da70192018-06-18 09:51:36 -0600106 */
class GrCCPathCacheEntry : public SkPathRef::GenIDChangeListener {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() override;

    // Unique ID of the GrCCPathCache that created this entry.
    uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
    // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
    bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void initAsStashedAtlas(const GrUniqueKey& atlasKey, const SkIVector& atlasOffset,
                            const SkRect& devBounds, const SkRect& devBounds45,
                            const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the CachedAtlasInfo data.
    void updateToCachedAtlas(const GrUniqueKey& atlasKey, const SkIVector& newAtlasOffset,
                             sk_sp<GrCCAtlas::CachedAtlasInfo>);

    const GrUniqueKey& atlasKey() const { return fAtlasKey; }

    // Drops both the atlas key and the cached-atlas info (after this, hasCachedAtlas() == false).
    void resetAtlasKeyAndInfo() {
        fAtlasKey.reset();
        fCachedAtlasInfo.reset();
    }

    // This is a utility for the caller to detect when a path gets drawn more than once during the
    // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check
    // here to see if they have already placed the path previously during the same flush. The caller
    // is required to reset all currFlushAtlas references back to null before any subsequent flush.
    void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) {
        // This should not get called more than once in a single flush. Once fCurrFlushAtlas is
        // non-null, it can only be set back to null (once the flush is over).
        SkASSERT(!fCurrFlushAtlas || !currFlushAtlas);
        fCurrFlushAtlas = currFlushAtlas;
    }
    const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; }

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    // Only GrCCPathCache creates entries (see friend declaration below).
    GrCCPathCacheEntry(uint32_t pathCacheUniqueID, const MaskTransform&);

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from the
    // resource cache if needed.
    void invalidateAtlas();

    // Called when our corresponding path is modified or deleted. Not threadsafe.
    void onChange() override;

    const uint32_t fPathCacheUniqueID;
    MaskTransform fMaskTransform;
    int fHitCount = 1;  // Entries are created as a result of a lookup, so they start at 1 hit.

    GrUniqueKey fAtlasKey;
    SkIVector fAtlasOffset;

    // Cached device-space bounds of the path (axis-aligned, 45-degree, and integer variants).
    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
    sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;

    // This field is for when a path gets drawn more than once during the same flush.
    const GrCCAtlas* fCurrFlushAtlas = nullptr;

    // Debug-only: records the thread this entry may be used on (entries are not threadsafe).
    SkDEBUGCODE(SkThreadID fGraphicsThreadID);

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint32_t, DoEvenOddFill);  // To access data.
};
197
198inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
199 const SkIVector& shift, GrColor color,
200 DoEvenOddFill doEvenOddFill) {
201 float dx = (float)shift.fX, dy = (float)shift.fY;
202 this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
203 entry.fAtlasOffset - shift, color, doEvenOddFill);
204}
205
206#endif