/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

DECLARE_SKMESSAGEBUS_MESSAGE(sk_sp<GrCCPathCacheEntry>);

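// Hands out a valid, unique ID for each path cache. The ID doubles as the cache's SkMessageBus
// inbox ID, so invalidation messages can be routed back to the cache that owns a given entry.
// The loop simply retries if the counter ever wraps around to SK_InvalidUniqueID.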
static inline uint32_t next_path_cache_id() {
    static std::atomic<uint32_t> gNextID(1);
    for (;;) {
        uint32_t id = gNextID.fetch_add(+1, std::memory_order_acquire);
        if (SK_InvalidUniqueID != id) {
            return id;
        }
    }
}

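// SkMessageBus filter: only deliver an eviction message to the inbox whose unique ID matches the
// path cache that created the entry, so no cache ever sees another cache's entries.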
static inline bool SkShouldPostMessageToBus(
        const sk_sp<GrCCPathCacheEntry>& entry, uint32_t msgBusUniqueID) {
    return entry->pathCacheUniqueID() == msgBusUniqueID;
}

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;


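// MaskTransform captures the parts of a view matrix that affect a cached mask: the 2x2 linear
// portion is stored verbatim, while the translation is split into an integer device-space shift
// (returned via 'shift') plus, outside the Android framework, a subpixel remainder used below for
// fuzzy matching.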
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f transFloor;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // On Android framework we pre-round view matrix translates to integers for better caching.
    transFloor = translate;
#else
    transFloor = translate.floor();
    (translate - transFloor).store(fSubpixelTranslate);
#endif
    shift->set((int)transFloor[0], (int)transFloor[1]);
    SkASSERT((float)shift->fX == transFloor[0]);  // Make sure transFloor had integer values.
    SkASSERT((float)shift->fY == transFloor[1]);
}

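// Two mask transforms are compatible if their 2x2 linear portions match exactly and (when
// tracked) their subpixel translates differ by no more than 1/256 of a pixel.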
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts for both a shape's path geometry and any stroke/style.
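// Layout of the key, in uint32_t's:
//   [0]   styled key size in bytes (stroke words + unstyled words)
//   [1]   stroke width as float bits, or ~0 for a fill
//   [2]   stroke miter limit as float bits
//   [3]   (cap << 16) | join
//   [4..] the shape's unstyled geometry key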
class WriteStyledKey {
public:
    static constexpr int kStyledKeySizeInBytesIdx = 0;
    static constexpr int kStrokeWidthIdx = 1;
    static constexpr int kStrokeMiterIdx = 2;
    static constexpr int kStrokeCapJoinIdx = 3;
    static constexpr int kShapeUnstyledKeyIdx = 4;

    static constexpr int kStrokeKeyCount = 3;  // [width, miterLimit, cap|join].

    WriteStyledKey(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        out[kStyledKeySizeInBytesIdx] =
                (kStrokeKeyCount + fShapeUnstyledKeyCount) * sizeof(uint32_t);

        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}

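// Builds the cache entry and its styled key in a single heap allocation: the GrCCPathCacheEntry
// is placement-newed at the front, and the variable-length key is written immediately after it.
// GetKey() and Hash() below read the key back out of that footer.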
inline GrCCPathCache::HashNode::HashNode(uint32_t pathCacheUniqueID, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry.reset(new (memory) GrCCPathCacheEntry(pathCacheUniqueID, m));

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}


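// The inbox ID is this cache's unique ID; each entry carries it as pathCacheUniqueID(), which the
// SkShouldPostMessageToBus() filter above uses to route eviction messages back to this inbox.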
GrCCPathCache::GrCCPathCache()
        : fInvalidatedEntriesInbox(next_path_cache_id()) {
}

#ifdef SK_DEBUG
GrCCPathCache::~GrCCPathCache() {
    // Ensure the hash table and LRU list are still coherent.
    int lruCount = 0;
    for (const GrCCPathCacheEntry* entry : fLRU) {
        SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);
        ++lruCount;
    }
    SkASSERT(fHashTable.count() == lruCount);
}
#endif

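// Looks up (and optionally creates) the cache entry for 'shape' under mask transform 'm'. A hit
// with a compatible transform bumps the entry's hit count; a hit with an incompatible transform
// either recycles the entry (if we hold the only reference) or evicts it. New entries register
// themselves as gen-ID change listeners on the shape, so editing the path invalidates the entry.
//
// Rough usage sketch (hypothetical caller-side names; the real call sites live elsewhere in CCPR):
//
//     SkIVector maskShift;
//     GrCCPathCache::MaskTransform m(viewMatrix, &maskShift);
//     sk_sp<GrCCPathCacheEntry> entry =
//             pathCache->find(shape, m, GrCCPathCache::CreateIfAbsent::kYes);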
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(fLRU.isInList(entry));
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(fInvalidatedEntriesInbox.uniqueID(), m, shape))->entry();
        shape.addGenIDChangeListener(sk_ref_sp(entry));
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

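// Removes the entry from both the LRU list and the hash table. The list-membership check guards
// against double eviction (e.g. an entry that was already evicted before its invalidation message
// was polled). The hash removal drops the table's ref and may delete the entry, so it goes last.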
void GrCCPathCache::evict(GrCCPathCacheEntry* entry) {
    bool isInCache = entry->fNext || (fLRU.tail() == entry);
    SkASSERT(isInCache == fLRU.isInList(entry));
    if (isInCache) {
        fLRU.remove(entry);
        fHashTable.remove(HashNode::GetKey(entry));  // Do this last, as it might delete the entry.
    }
}

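// Drains the thread-safe inbox of entries whose underlying paths have changed (see onChange()
// below) and evicts them from the cache.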
void GrCCPathCache::purgeAsNeeded() {
    SkTArray<sk_sp<GrCCPathCacheEntry>> invalidatedEntries;
    fInvalidatedEntriesInbox.poll(&invalidatedEntries);
    for (const sk_sp<GrCCPathCacheEntry>& entry : invalidatedEntries) {
        this->evict(entry.get());
    }
}


GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.
    this->invalidateAtlas();
}

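// Records that this entry's mask resides in the atlas "stashed" from the previous flush. The
// device bounds are translated by -maskShift, with the shift folded into the atlas offset, so the
// stored bounds are relative to the cached mask rather than to the original draw.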
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

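// Points this entry at a cached atlas texture and adds its pixel footprint to the shared
// CachedAtlasInfo bookkeeping, which invalidateAtlas() below uses to decide when the texture has
// accumulated enough dead pixels to be purged.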
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

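// Called when this entry's mask pixels can no longer be used (e.g. the entry is being recycled or
// destroyed). Once at least half of an atlas's path pixels have been invalidated, the whole atlas
// texture is purged from the resource cache.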
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            // The GrContext and CCPR path cache both share the same unique ID.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fCachedAtlasInfo->fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

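// GenIDChangeListener callback: the underlying SkPath was modified or deleted. The change can
// come from any thread, so rather than touching the cache directly we post a message that the
// owning cache picks up in purgeAsNeeded().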
void GrCCPathCacheEntry::onChange() {
    // Post a thread-safe eviction message.
    SkMessageBus<sk_sp<GrCCPathCacheEntry>>::Post(sk_ref_sp(this));
}