/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

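// Splits the view matrix into a 2x2 linear part (stored in the transform), an integer
// translate (returned via 'shift'), and, outside the Android framework build, a subpixel
// remainder that later lookups compare via fuzzy_equals().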
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f transFloor;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // On Android framework we pre-round view matrix translates to integers for better caching.
    transFloor = translate;
#else
    transFloor = translate.floor();
    (translate - transFloor).store(fSubpixelTranslate);
#endif
    shift->set((int)transFloor[0], (int)transFloor[1]);
    SkASSERT((float)shift->fX == transFloor[0]);  // Make sure transFloor had integer values.
    SkASSERT((float)shift->fY == transFloor[1]);
}

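// Two mask transforms are interchangeable if their 2x2 parts match exactly and, outside the
// Android framework build, their subpixel translates differ by at most 1/256 of a pixel.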
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts for both a shape's path geometry and any stroke/style.
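// Key layout, in uint32_t's: [0] total key size in bytes (stroke + unstyled), [1] stroke
// width, [2] miter limit, [3] (cap << 16) | join, [4..] the shape's unstyled key. Fills
// store ~0 for the width so they can never collide with a real stroke.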
class WriteStyledKey {
public:
    static constexpr int kStyledKeySizeInBytesIdx = 0;
    static constexpr int kStrokeWidthIdx = 1;
    static constexpr int kStrokeMiterIdx = 2;
    static constexpr int kStrokeCapJoinIdx = 3;
    static constexpr int kShapeUnstyledKeyIdx = 4;

    static constexpr int kStrokeKeyCount = 3;  // [width, miterLimit, cap|join].

    WriteStyledKey(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        out[kStyledKeySizeInBytesIdx] =
                (kStrokeKeyCount + fShapeUnstyledKeyCount) * sizeof(uint32_t);

        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}  // namespace

inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

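// A HashKey points at the variable-length key footer: fData[0] holds the key size in bytes,
// so equality compares that size first and then memcmp's the bytes that follow, and hashing
// covers only the data after fData[0].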
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

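// The hash table owns a ref on each entry via its HashNode. Destroying a node finalizes the
// eviction: it unlinks the entry from the LRU list, clears the entry's weak pointer back to
// the cache, and drops that ref.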
GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

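// Lookup flow: build the styled key, probe the hash table, and on a hit whose mask transform
// is not fuzzy-equal to the requested one, either recycle the entry in place (when the cache
// holds the only ref and creation was requested) or evict it and create a fresh entry.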
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

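// Note: the stored bounds are translated by -maskShift so they are relative to the cached
// mask's origin rather than the original device-space position.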
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

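// Invalidation heuristic: each entry adds its pixel count to the shared CachedAtlasInfo's
// invalidated total; once at least half of an atlas's path pixels have been invalidated, the
// entire atlas texture is purged from the resource cache.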
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}