/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different matrix,
 * the old entry gets evicted.
 */
class GrCCPathCache {
public:
    GrCCPathCache(uint32_t contextUniqueID) : fInvalidatedEntriesInbox(contextUniqueID) {}
    SkDEBUGCODE(~GrCCPathCache();)

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate
    // are shaved off and returned to the caller. The caller is responsible for those integer
    // shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };
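
    // For illustration only (this example is not part of the original header): a view matrix
    // that scales by 2x and translates by (10.25, -3) would decompose along these lines,
    // assuming the integer translate is simply split off from its fractional remainder:
    //
    //     fMatrix2x2         = {2, 0, 0, 2};    // scale/skew terms of the matrix
    //     *shift             = {10, -3};        // integer translate, returned to the caller
    //     fSubpixelTranslate = {0.25f, 0};      // fractional remainder (non-AOSP builds only)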

    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
                                   CreateIfAbsent = CreateIfAbsent::kNo);
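
    // A typical per-draw lookup might look as follows. This is an illustrative sketch only;
    // 'cache', 'shape', and 'viewMatrix' are hypothetical locals belonging to the caller:
    //
    //     SkIVector shift;
    //     GrCCPathCache::MaskTransform mt(viewMatrix, &shift);
    //     if (sk_sp<GrCCPathCacheEntry> entry =
    //                 cache.find(shape, mt, GrCCPathCache::CreateIfAbsent::kYes)) {
    //         // Use 'entry'; apply the integer translate 'shift' at draw time.
    //     }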

    void evict(GrCCPathCacheEntry*);

    void purgeAsNeeded();

private:
    // Wrapper around a raw GrShape key that has a specialized operator==. Used by the hash table.
    struct HashKey {
        const uint32_t* fData;
    };
    friend bool operator==(const HashKey&, const HashKey&);

    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It provides static
    // methods for SkTHash, and can only be moved. This guarantees the hash table holds exactly one
    // reference for each entry.
    class HashNode : SkNoncopyable {
    public:
        static HashKey GetKey(const HashNode& node) { return GetKey(node.entry()); }
        static HashKey GetKey(const GrCCPathCacheEntry*);
        static uint32_t Hash(HashKey);

        HashNode() = default;
        HashNode(uint32_t pathCacheUniqueID, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node) : fEntry(std::move(node.fEntry)) {
            SkASSERT(!node.fEntry);
        }

        HashNode& operator=(HashNode&& node) {
            fEntry = std::move(node.fEntry);
            SkASSERT(!node.fEntry);
            return *this;
        }

        GrCCPathCacheEntry* entry() const { return fEntry.get(); }

    private:
        sk_sp<GrCCPathCacheEntry> fEntry;
        // The GrShape's unstyled key is stored as a variable-length footer to the 'fEntry'
        // allocation. GetKey provides access to it.
    };

    SkTHashTable<HashNode, HashKey> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
    SkMessageBus<sk_sp<GrCCPathCacheEntry>>::Inbox fInvalidatedEntriesInbox;
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public SkPathRef::GenIDChangeListener {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() override;

    uint32_t pathCacheUniqueID() const { return fPathCacheUniqueID; }

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }

    // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
    // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
    bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void initAsStashedAtlas(const GrUniqueKey& atlasKey, const SkIVector& atlasOffset,
                            const SkRect& devBounds, const SkRect& devBounds45,
                            const SkIRect& devIBounds, const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the CachedAtlasInfo data.
    void updateToCachedAtlas(const GrUniqueKey& atlasKey, const SkIVector& newAtlasOffset,
                             sk_sp<GrCCAtlas::CachedAtlasInfo>);
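
    // Taken together, the two methods above imply the following atlas lifecycle for an entry
    // (a sketch of the intended flow, with placeholder names; not additional API):
    //
    //     Flush N:   render the path into the fp16 coverage-count atlas, then call
    //                entry->initAsStashedAtlas(stashedKey, offset, ...) before stashing it.
    //     Flush N+1: the path is reused, so copy its mask into a permanent 8-bit atlas and
    //                call entry->updateToCachedAtlas(cachedKey, newOffset, std::move(info)).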

    const GrUniqueKey& atlasKey() const { return fAtlasKey; }

    void resetAtlasKeyAndInfo() {
        fAtlasKey.reset();
        fCachedAtlasInfo.reset();
    }

    // This is a utility for the caller to detect when a path gets drawn more than once during the
    // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check
    // here to see if they have already placed the path earlier in the same flush. The caller is
    // required to reset all currFlushAtlas references back to null before any subsequent flush.
    void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) {
        // This should not get called more than once in a single flush. Once fCurrFlushAtlas is
        // non-null, it can only be set back to null (once the flush is over).
        SkASSERT(!fCurrFlushAtlas || !currFlushAtlas);
        fCurrFlushAtlas = currFlushAtlas;
    }
    const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; }
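
    // For example, the caller's per-path flush logic might look like this (illustrative only;
    // 'atlasWeJustRenderedInto' is a hypothetical local):
    //
    //     if (const GrCCAtlas* atlas = entry->currFlushAtlas()) {
    //         // The path was already placed earlier in this flush; draw from 'atlas'.
    //     } else {
    //         // First use this flush: render the mask, then record where it landed.
    //         entry->setCurrFlushAtlas(atlasWeJustRenderedInto);
    //     }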

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(uint32_t pathCacheUniqueID, const MaskTransform& maskTransform)
            : fPathCacheUniqueID(pathCacheUniqueID), fMaskTransform(maskTransform) {
        SkASSERT(SK_InvalidUniqueID != fPathCacheUniqueID);
    }

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from
    // the resource cache if needed.
    void invalidateAtlas();

    // Called when our corresponding path is modified or deleted. Not threadsafe.
    void onChange() override;

    const uint32_t fPathCacheUniqueID;
    MaskTransform fMaskTransform;
    int fHitCount = 1;

    GrUniqueKey fAtlasKey;
    SkIVector fAtlasOffset;

    // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
    sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;

    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    // This field is for when a path gets drawn more than once during the same flush.
    const GrCCAtlas* fCurrFlushAtlas = nullptr;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint32_t, DoEvenOddFill);  // To access data.
};

inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, GrColor color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif