/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"
#include "ccpr/GrCCPathParser.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

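// Decomposes a draw matrix for caching: the 2x2 linear part is stored in fMatrix2x2. Outside the
// Android framework build, the integer portion of the translation is returned in 'shift' and the
// subpixel remainder is stored in fSubpixelTranslate, so near-miss translations can be matched by
// fuzzy_equals() below.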
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
}

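// Returns true if the two mask transforms are equivalent for reuse: the 2x2 parts must match
// exactly, and (outside the Android framework build) the subpixel translations must agree to
// within 1/256 of a pixel.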
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts for both a shape's path geometry and any stroke/style.
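//
// The key layout written by write() is:
//   out[0]: size in bytes of the rest of the key, beginning at out[1]
//   out[1]: number of uint32_t's in the style key
//   out[2...]: the shape's unstyled key, followed by the style key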
class WriteStyledKey {
public:
    WriteStyledKey(const GrShape& shape)
            : fShapeUnstyledKeyCount(shape.unstyledKeySize())
            , fStyleKeyCount(
                      GrStyle::KeySize(shape.style(), GrStyle::Apply::kPathEffectAndStrokeRec)) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return 2 + fShapeUnstyledKeyCount + fStyleKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        // out[0] records how many bytes remain in the key, beginning at out[1].
        out[0] = (1 + fShapeUnstyledKeyCount + fStyleKeyCount) * sizeof(uint32_t);
        out[1] = fStyleKeyCount;
        shape.writeUnstyledKey(&out[2]);
        GrStyle::WriteKey(&out[2 + fShapeUnstyledKeyCount], shape.style(),
                          GrStyle::Apply::kPathEffectAndStrokeRec, 1);
    }

private:
    int fShapeUnstyledKeyCount;
    int fStyleKeyCount;
};

}  // namespace

inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

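// Keys compare cheaply: fData[0] (the byte count of the remainder) must match exactly, after
// which a single memcmp covers the variable-length remainder of both keys.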
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

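// Move assignment destroys in place and then placement-news from 'node': ~HashNode() first
// finalizes the outgoing entry's eviction (LRU removal and unref) before this slot adopts the
// incoming entry.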
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

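// An illustrative sketch (hypothetical, not part of this file) of how a caller might use find();
// 'viewMatrix' and 'pathCache' are assumed names:
//
//     SkIVector maskShift;
//     GrCCPathCache::MaskTransform m(viewMatrix, &maskShift);
//     if (sk_sp<GrCCPathCacheEntry> entry =
//                 pathCache->find(shape, m, GrCCPathCache::CreateIfAbsent::kYes)) {
//         // On success, the entry is now at the head of the LRU; a hit with a compatible
//         // transform also bumped its fHitCount.
//     }
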
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

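// Points this entry at a mask in the atlas that was stashed from the previous flush. The bounds
// are stored pre-translated by -maskShift, with the shift folded into fAtlasOffset instead, so
// the cached mask stays independent of the path's integer translation.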
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

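// Undoes this entry's contribution to the cached atlas, if any. Once at least half of an atlas's
// path pixels have been invalidated, the entire atlas texture is purged from the resource cache
// via an invalidation message.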
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}