/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCPathCache_DEFINED
#define GrCCPathCache_DEFINED

#include "SkExchange.h"
#include "SkTHash.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCAtlas.h"
#include "ccpr/GrCCPathProcessor.h"

class GrCCPathCacheEntry;
class GrShape;

/**
 * This class implements an LRU cache that maps from GrShape to GrCCPathCacheEntry objects. Shapes
 * are only given one entry in the cache, so any time they are accessed with a different
 * MaskTransform, the old entry gets evicted.
 */
class GrCCPathCache {
public:
#ifdef SK_DEBUG
    ~GrCCPathCache() {
        // Ensure the hash table and LRU list are still coherent.
        fHashTable.reset();
        SkASSERT(fLRU.isEmpty());
    }
#endif

    // Stores the components of a transformation that affect a path mask (i.e. everything but
    // integer translation). During construction, any integer portions of the matrix's translate are
    // shaved off and returned to the caller. The caller is responsible for those integer shifts.
    struct MaskTransform {
        MaskTransform(const SkMatrix& m, SkIVector* shift);
        float fMatrix2x2[4];
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Except on AOSP, cache hits must have matching subpixel portions of their view matrix.
        // On AOSP we follow after HWUI and ignore the subpixel translate.
        float fSubpixelTranslate[2];
#endif
    };

    enum class CreateIfAbsent : bool {
        kNo = false,
        kYes = true
    };

    // Finds an entry in the cache. Shapes are only given one entry, so any time they are accessed
    // with a different MaskTransform, the old entry gets evicted.
    sk_sp<GrCCPathCacheEntry> find(const GrShape&, const MaskTransform&,
                                   CreateIfAbsent = CreateIfAbsent::kNo);

    void evict(const GrCCPathCacheEntry*);
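
    // A minimal lookup sketch (illustrative only, not part of the API): the caller splits the
    // integer translate off its view matrix via MaskTransform, then looks up (or creates) the
    // entry. 'fPathCache', 'shape', and 'viewMatrix' are hypothetical caller-side names.
    //
    //     SkIVector shift;
    //     GrCCPathCache::MaskTransform m(viewMatrix, &shift);
    //     sk_sp<GrCCPathCacheEntry> entry =
    //             fPathCache.find(shape, m, GrCCPathCache::CreateIfAbsent::kYes);
    //     if (entry && entry->hasCachedAtlas()) {
    //         // Draw from the permanent 8-bit atlas, applying 'shift' at draw time.
    //     }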

private:
    // Wrapper around a raw GrShape key that has a specialized operator==. Used by the hash table.
    struct HashKey {
        const uint32_t* fData;
    };
    friend bool operator==(const HashKey&, const HashKey&);

    // This is a special ref ptr for GrCCPathCacheEntry, used by the hash table. It can only be
    // moved, which guarantees the hash table holds exactly one reference for each entry. When a
    // HashNode goes out of scope, it therefore means the entry has been evicted from the cache.
    class HashNode : SkNoncopyable {
    public:
        static HashKey GetKey(const HashNode& node) { return GetKey(node.fEntry); }
        static HashKey GetKey(const GrCCPathCacheEntry*);
        static uint32_t Hash(HashKey);

        HashNode() = default;
        HashNode(GrCCPathCache*, const MaskTransform&, const GrShape&);
        HashNode(HashNode&& node) { fEntry = skstd::exchange(node.fEntry, nullptr); }
        ~HashNode(); // Called when fEntry (if not null) has been evicted from the cache.

        HashNode& operator=(HashNode&&);

        GrCCPathCacheEntry* entry() const { return fEntry; }

    private:
        GrCCPathCacheEntry* fEntry = nullptr;
        // The GrShape's unstyled key is stored as a variable-length footer to the 'fEntry'
        // allocation. GetKey provides access to it.
    };

    SkTHashTable<HashNode, HashKey> fHashTable;
    SkTInternalLList<GrCCPathCacheEntry> fLRU;
};

/**
 * This class stores all the data necessary to draw a specific path + matrix combination from its
 * corresponding cached atlas.
 */
class GrCCPathCacheEntry : public SkPathRef::GenIDChangeListener {
public:
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrCCPathCacheEntry);

    ~GrCCPathCacheEntry() override;

    // The number of times this specific entry (path + matrix combination) has been pulled from
    // the path cache. As long as the caller does exactly one lookup per draw, this translates to
    // the number of times the path has been drawn with a compatible matrix.
    //
    // If the entry did not previously exist and was created during
    // GrCCPathCache::find(.., CreateIfAbsent::kYes), its hit count will be 1.
    int hitCount() const { return fHitCount; }
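
    // (Illustrative only: a caller might, for example, wait until hitCount() > 1 before copying a
    // mask into a permanent 8-bit atlas, so that single-use paths never pay for the copy. The
    // actual policy lives in the caller.)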

    // Does this entry reference a permanent, 8-bit atlas that resides in the resource cache?
    // (i.e. not a temporarily-stashed, fp16 coverage count atlas.)
    bool hasCachedAtlas() const { return SkToBool(fCachedAtlasInfo); }

    const SkIRect& devIBounds() const { return fDevIBounds; }
    int width() const { return fDevIBounds.width(); }
    int height() const { return fDevIBounds.height(); }

    // Called once our path has been rendered into the mainline CCPR (fp16, coverage count) atlas.
    // The caller will stash this atlas texture away after drawing, and during the next flush,
    // recover it and attempt to copy any paths that got reused into permanent 8-bit atlases.
    void initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                            const SkIVector& atlasOffset, const SkRect& devBounds,
                            const SkRect& devBounds45, const SkIRect& devIBounds,
                            const SkIVector& maskShift);

    // Called once our path mask has been copied into a permanent, 8-bit atlas. This method points
    // the entry at the new atlas and updates the CachedAtlasInfo data.
    void updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                             const SkIVector& newAtlasOffset, sk_sp<GrCCAtlas::CachedAtlasInfo>);
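
    // Hedged sketch of the intended caller-side lifecycle ('stashedAtlasKey', 'cachedAtlasKey',
    // 'info', and the offset/bounds variables are hypothetical names):
    //
    //     // Flush N: the path is rendered into the fp16 coverage-count atlas, which gets stashed.
    //     entry->initAsStashedAtlas(stashedAtlasKey, contextUniqueID, atlasOffset,
    //                               devBounds, devBounds45, devIBounds, maskShift);
    //
    //     // Flush N+1: the reused mask is copied into a permanent 8-bit atlas.
    //     entry->updateToCachedAtlas(cachedAtlasKey, contextUniqueID, newAtlasOffset,
    //                                std::move(info));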

    const GrUniqueKey& atlasKey() const { return fAtlasKey; }

    void resetAtlasKeyAndInfo() {
        fAtlasKey.reset();
        fCachedAtlasInfo.reset();
    }

    // This is a utility for the caller to detect when a path gets drawn more than once during the
    // same flush, with compatible matrices. Before adding a path to an atlas, the caller may check
    // here to see if they have already placed the path earlier in the same flush. The caller is
    // required to reset all currFlushAtlas references back to null before any subsequent flush.
    void setCurrFlushAtlas(const GrCCAtlas* currFlushAtlas) {
        // This should not get called more than once in a single flush. Once fCurrFlushAtlas is
        // non-null, it can only be set back to null (once the flush is over).
        SkASSERT(!fCurrFlushAtlas || !currFlushAtlas);
        fCurrFlushAtlas = currFlushAtlas;
    }
    const GrCCAtlas* currFlushAtlas() const { return fCurrFlushAtlas; }
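
    // A minimal caller-side sketch of that reuse check ('currentAtlas' and the surrounding draw
    // loop are hypothetical):
    //
    //     if (const GrCCAtlas* atlas = entry->currFlushAtlas()) {
    //         // The mask was already placed in 'atlas' earlier this flush; reuse that location.
    //     } else {
    //         // Add the path to the atlas being built for this flush, then remember it:
    //         entry->setCurrFlushAtlas(currentAtlas);
    //     }
    //     ...
    //     entry->setCurrFlushAtlas(nullptr);  // Reset once the flush is over.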

private:
    using MaskTransform = GrCCPathCache::MaskTransform;

    GrCCPathCacheEntry(GrCCPathCache* cache, const MaskTransform& m)
            : fCacheWeakPtr(cache), fMaskTransform(m) {}

    // Resets this entry back to not having an atlas, and purges its previous atlas texture from the
    // resource cache if needed.
    void invalidateAtlas();

    // Called when our corresponding path is modified or deleted.
    void onChange() override;

    uint32_t fContextUniqueID;
    GrCCPathCache* fCacheWeakPtr; // Gets manually reset to null by the path cache upon eviction.
    MaskTransform fMaskTransform;
    int fHitCount = 1;

    GrUniqueKey fAtlasKey;
    SkIVector fAtlasOffset;

    // If null, then we are referencing a "stashed" atlas (see initAsStashedAtlas()).
    sk_sp<GrCCAtlas::CachedAtlasInfo> fCachedAtlasInfo;

    SkRect fDevBounds;
    SkRect fDevBounds45;
    SkIRect fDevIBounds;

    // This field is for when a path gets drawn more than once during the same flush.
    const GrCCAtlas* fCurrFlushAtlas = nullptr;

    friend class GrCCPathCache;
    friend void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry&, const SkIVector&,
                                                 uint32_t, DoEvenOddFill); // To access data.
};

inline void GrCCPathProcessor::Instance::set(const GrCCPathCacheEntry& entry,
                                             const SkIVector& shift, GrColor color,
                                             DoEvenOddFill doEvenOddFill) {
    float dx = (float)shift.fX, dy = (float)shift.fY;
    this->set(entry.fDevBounds.makeOffset(dx, dy), MakeOffset45(entry.fDevBounds45, dx, dy),
              entry.fAtlasOffset - shift, color, doEvenOddFill);
}

#endif