/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

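// Decomposes the draw matrix into a 2x2 linear part plus a translation. The integer floor of the
// translation is returned in 'shift', while the subpixel remainder is stored in
// fSubpixelTranslate for fuzzy matching against future draws (except on the Android framework,
// where translates are assumed to already be rounded to integers).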
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f transFloor;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // On Android framework we pre-round view matrix translates to integers for better caching.
    transFloor = translate;
#else
    transFloor = translate.floor();
    (translate - transFloor).store(fSubpixelTranslate);
#endif
    shift->set((int)transFloor[0], (int)transFloor[1]);
    SkASSERT((float)shift->fX == transFloor[0]);  // Make sure transFloor had integer values.
    SkASSERT((float)shift->fY == transFloor[1]);
}

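// Two mask transforms are considered interchangeable if their 2x2 parts match exactly and their
// subpixel translates differ by no more than 1/256 of a pixel. (The subpixel check is skipped on
// the Android framework, where translates are pre-rounded to integers.)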
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts for both a shape's path geometry and any stroke/style.
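//
// The key is a flat array of uint32_t's laid out as follows:
//   [0]    styled key size in bytes (stroke words + unstyled key words)
//   [1]    stroke width as float bits (~0 for fills)
//   [2]    stroke miter limit as float bits (0 for fills)
//   [3]    stroke cap in the high 16 bits | join in the low 16 bits (0 for fills)
//   [4..]  the shape's unstyled key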
class WriteStyledKey {
public:
    static constexpr int kStyledKeySizeInBytesIdx = 0;
    static constexpr int kStrokeWidthIdx = 1;
    static constexpr int kStrokeMiterIdx = 2;
    static constexpr int kStrokeCapJoinIdx = 3;
    static constexpr int kShapeUnstyledKeyIdx = 4;

    static constexpr int kStrokeKeyCount = 3;  // [width, miterLimit, cap|join].

    WriteStyledKey(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        out[kStyledKeySizeInBytesIdx] =
                (kStrokeKeyCount + fShapeUnstyledKeyCount) * sizeof(uint32_t);

        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}

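// Each HashNode owns a single ref on its entry. The GrCCPathCacheEntry and its styled key are
// placed together in one heap allocation, with the key beginning immediately after the entry.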
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

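// HashKey wraps a pointer to styled-key data in the WriteStyledKey format: fData[0] holds the
// key's size in bytes, and the key contents follow starting at fData[1].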
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

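// Looks up a cache entry for the given shape + mask transform:
//   - On a hit with a fuzzy-equal transform, the entry's hit count is bumped and it is reused.
//   - On a hit whose transform no longer matches, the entry is recycled in place if it is unique
//     (and creation was requested); otherwise it is evicted.
//   - On a miss with CreateIfAbsent::kYes, a new entry is created, registered as a gen-ID change
//     listener on the shape, and the LRU tail is evicted first if the cache is full.
// The returned entry is moved to the head of the LRU list.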
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new
            // one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        shape.addGenIDChangeListener(sk_ref_sp(entry));
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

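// Removes the entry from the hash table. ~HashNode() finishes the eviction by unlinking the entry
// from the LRU list, clearing its weak pointer back to this cache, and dropping the cache's ref.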
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

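// Records the atlas key and offset for a path mask that lives in the atlas stashed from the
// previous flush. The device bounds are translated by -maskShift (and the atlas offset by
// +maskShift) so the cached geometry is stored relative to the mask itself rather than the
// original draw's integer translation.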
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

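// Points this entry at a mask that now lives in a cached atlas texture, taking a ref on the
// atlas's CachedAtlasInfo and adding this path's pixel footprint to its running total.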
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

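// Resets this entry's atlas key. If the mask was in a cached atlas, its pixels are counted as
// invalidated; once at least half of the atlas's path pixels have been invalidated, the atlas
// texture's unique key is posted for removal from the resource cache.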
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

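// GrCCPathCacheEntry is registered as a gen-ID change listener on its path in
// GrCCPathCache::find(); this fires when the underlying SkPath is modified or deleted.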
void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}