/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

DECLARE_SKMESSAGEBUS_MESSAGE(sk_sp<GrCCPathCacheEntry>);

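// SkMessageBus routing hook: only deliver an invalidated-entry message to the inbox whose unique
// ID matches the path cache that owns the entry.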
static inline bool SkShouldPostMessageToBus(
        const sk_sp<GrCCPathCacheEntry>& entry, uint32_t msgBusUniqueID) {
    return entry->pathCacheUniqueID() == msgBusUniqueID;
}

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

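// Captures the 2x2 scale/skew portion of the matrix. The translation is split into an integer
// part, returned in 'shift', and (except on Android framework builds) a subpixel remainder.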
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f transFloor;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // On Android framework we pre-round view matrix translates to integers for better caching.
    transFloor = translate;
#else
    transFloor = translate.floor();
    (translate - transFloor).store(fSubpixelTranslate);
#endif
    shift->set((int)transFloor[0], (int)transFloor[1]);
    SkASSERT((float)shift->fX == transFloor[0]);  // Make sure transFloor had integer values.
    SkASSERT((float)shift->fY == transFloor[1]);
}

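// Two mask transforms are compatible if their 2x2 components are equal and their subpixel
// translates differ by less than 1/256 of a pixel (subpixel translate is ignored on Android
// framework builds, where translates are pre-rounded to integers).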
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts both for a shape's path geometry, as well as any stroke/style.
class WriteStyledKey {
public:
    static constexpr int kStyledKeySizeInBytesIdx = 0;
    static constexpr int kStrokeWidthIdx = 1;
    static constexpr int kStrokeMiterIdx = 2;
    static constexpr int kStrokeCapJoinIdx = 3;
    static constexpr int kShapeUnstyledKeyIdx = 4;

    static constexpr int kStrokeKeyCount = 3;  // [width, miterLimit, cap|join].

    WriteStyledKey(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        out[kStyledKeySizeInBytesIdx] =
                (kStrokeKeyCount + fShapeUnstyledKeyCount) * sizeof(uint32_t);

        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}

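// Constructs a new cache entry, writing the shape's styled key as a variable-length footer
// immediately after the GrCCPathCacheEntry allocation.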
inline GrCCPathCache::HashNode::HashNode(uint32_t pathCacheUniqueID, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry.reset(new (memory) GrCCPathCacheEntry(pathCacheUniqueID, m));

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

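// HashKey layout: fData[0] holds the key length in bytes, followed by the styled key contents.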
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

#ifdef SK_DEBUG
GrCCPathCache::~GrCCPathCache() {
    // Ensure the hash table and LRU list are still coherent.
    int lruCount = 0;
    for (const GrCCPathCacheEntry* entry : fLRU) {
        SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);
        ++lruCount;
    }
    SkASSERT(fHashTable.count() == lruCount);
}
#endif

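// Looks up a cache entry for the given shape and mask transform. On a hit whose transform no
// longer matches, the entry is recycled in place (when createIfAbsent is kYes and the cache holds
// the only reference), otherwise evicted. If no usable entry exists and createIfAbsent is kYes, a
// new one is inserted. The returned entry is moved to the head of the LRU list.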
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(fLRU.isInList(entry));
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new
            // one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(fInvalidatedEntriesInbox.uniqueID(), m, shape))->entry();
        shape.addGenIDChangeListener(sk_ref_sp(entry));
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

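// Removes the entry from the LRU list and the hash table, if it is still in the cache.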
void GrCCPathCache::evict(GrCCPathCacheEntry* entry) {
    bool isInCache = entry->fNext || (fLRU.tail() == entry);
    SkASSERT(isInCache == fLRU.isInList(entry));
    if (isInCache) {
        fLRU.remove(entry);
        fHashTable.remove(HashNode::GetKey(entry));  // Do this last, as it might delete the entry.
    }
}

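// Drains the invalidated-entries inbox and evicts each entry it contains. Entries post themselves
// to this inbox (via onChange) when their path's geometry changes or goes away.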
void GrCCPathCache::purgeAsNeeded() {
    SkTArray<sk_sp<GrCCPathCacheEntry>> invalidatedEntries;
    fInvalidatedEntriesInbox.poll(&invalidatedEntries);
    for (const sk_sp<GrCCPathCacheEntry>& entry : invalidatedEntries) {
        this->evict(entry.get());
    }
}


GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.
    this->invalidateAtlas();
}

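// Points this entry at a mask stashed in the atlas from the previous flush: records the atlas key
// and offset, and rebases the cached device-space bounds to be relative to the mask (offset by
// -maskShift).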
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

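// Points this entry at a mask in a permanently cached atlas: records the new key and offset,
// takes a ref on the atlas's CachedAtlasInfo, and adds this path's pixel count to the atlas's
// total.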
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

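// Called when this entry's mask pixels are no longer valid. Bumps the cached atlas's
// invalidated-pixel count and, once at least half of the atlas's pixels are invalid, posts a
// message to purge the atlas texture from the resource cache.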
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            // The GrContext and CCPR path cache both share the same unique ID.
            uint32_t contextUniqueID = fPathCacheUniqueID;
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, contextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

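// GrShape gen-ID change listener callback: the path's geometry has changed or been destroyed, so
// request eviction of this entry from its path cache.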
void GrCCPathCacheEntry::onChange() {
    // Post a thread-safe eviction message.
    SkMessageBus<sk_sp<GrCCPathCacheEntry>>::Post(sk_ref_sp(this));
}