/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

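// A MaskTransform records the components of a draw matrix that a cached mask depends on: the
// exact 2x2 scale/skew portion plus the fractional (subpixel) part of the translation. The
// whole-pixel part of the translation is returned through 'shift', so the same mask can be reused
// when a path is drawn at a different integer offset. (On Android framework builds the subpixel
// translation is not tracked and 'shift' is left unmodified.)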
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
}

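// Two mask transforms are considered equivalent if their 2x2 matrices match exactly and their
// subpixel translations differ by less than 1/256 of a pixel. A cached mask can be reused for any
// draw whose transform is fuzzy-equal to the one it was originally rendered with.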
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts both for a shape's path geometry, as well as any stroke/style.
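//
// The key is laid out as uint32_t's (layout follows from the index constants below):
//
//     [0]    total styled key size in bytes (stroke words + unstyled geometry words)
//     [1]    stroke width as raw float bits, or ~0 for a fill
//     [2]    stroke miter limit as raw float bits
//     [3]    (cap << 16) | join
//     [4..]  the shape's unstyled geometry key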
class WriteStyledKey {
public:
    static constexpr int kStyledKeySizeInBytesIdx = 0;
    static constexpr int kStrokeWidthIdx = 1;
    static constexpr int kStrokeMiterIdx = 2;
    static constexpr int kStrokeCapJoinIdx = 3;
    static constexpr int kShapeUnstyledKeyIdx = 4;

    static constexpr int kStrokeKeyCount = 3;  // [width, miterLimit, cap|join].

    WriteStyledKey(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        out[kStyledKeySizeInBytesIdx] =
                (kStrokeKeyCount + fShapeUnstyledKeyCount) * sizeof(uint32_t);

        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}  // namespace

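// Constructs a HashNode for a new cache entry. The GrCCPathCacheEntry and its styled key live in
// a single allocation: the entry struct comes first, followed by the key data as a
// variable-length footer.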
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

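// HashKey::fData[0] holds the styled key's size in bytes, so equality is a length check followed
// by a memcmp over that many bytes; Hash() below likewise skips the length word and hashes the
// remaining data.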
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

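// Move assignment replaces this node's contents by destroying them in place (which finalizes any
// pending eviction) and re-constructing from 'node'.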
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

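// Looks up (and optionally creates) the cache entry for 'shape' drawn with mask transform 'm'.
// The styled key is built on the stack and used to probe the hash table. On a hit with a
// fuzzy-equal transform we bump the hit count; on a hit with an incompatible transform we either
// recycle the entry (if it is unique and creation was requested) or evict it. The returned entry
// is moved to the head of the LRU list.
//
// Typical lookup from a caller (illustrative only; 'viewMatrix', 'maskShift', 'shape' and
// 'fPathCache' are hypothetical names, not part of this file):
//
//     SkIVector maskShift;
//     GrCCPathCache::MaskTransform m(viewMatrix, &maskShift);
//     sk_sp<GrCCPathCacheEntry> entry =
//             fPathCache.find(shape, m, GrCCPathCache::CreateIfAbsent::kYes);
//     if (entry) {
//         // Use the entry's atlas key/bounds, applying 'maskShift' to position the cached mask.
//     }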
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

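// Removes 'entry' from the hash table; ~HashNode() then unlinks it from the LRU list, clears its
// back-pointer to this cache, and drops the cache's ref.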
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}


GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

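// Points this entry at the "stashed" atlas, i.e. the atlas texture carried over from the previous
// flush. 'maskShift' is folded into the atlas offset, and the device-space bounds are translated
// by -maskShift so they are relative to the mask's own origin.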
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

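// Re-points this entry at a location within a persistent, cached atlas and registers this path's
// pixel footprint with the atlas's shared CachedAtlasInfo, which invalidateAtlas() later uses to
// decide when the texture holds too many dead pixels.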
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

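// Undoes this entry's contribution to the cached atlas, if any: its pixels are marked invalid,
// and once at least half of the atlas's path pixels have been invalidated the whole texture is
// purged from the resource cache.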
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}