blob: 01781b08db48bae6d1e3e15ce9e3c31e7f64d295 [file] [log] [blame]
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrCCPathCache.h"
9
10#include "GrShape.h"
11#include "SkNx.h"
Chris Dalton4da70192018-06-18 09:51:36 -060012
// The maximum number of cache entries we allow in our own cache (65,536). Once the hash table
// reaches this count, find() evicts the LRU tail before inserting a new entry.
static constexpr int kMaxCacheCount = 1 << 16;
15
// Snapshots the parts of the draw matrix that determine whether a cached mask can be reused:
// the 2x2 linear portion exactly and, outside Android-framework builds, the subpixel remainder
// of the translate. The integer part of the translate is returned through 'shift' so the caller
// can reposition the mask instead of re-rendering it.
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // Split the translate into an integer shift plus a subpixel remainder in [0,1).
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    // The float->int casts must be lossless for the shift to be meaningful.
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
    // NOTE(review): on SK_BUILD_FOR_ANDROID_FRAMEWORK builds, 'shift' is left unmodified --
    // confirm that callers pre-initialize it in that configuration.
}
28
29inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
30 const GrCCPathCache::MaskTransform& b) {
Chris Dalton644341a2018-06-18 19:14:16 -060031 if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
32 return false;
33 }
34#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
35 if (((Sk2f::Load(a.fSubpixelTranslate) -
36 Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
37 return false;
38 }
39#endif
40 return true;
Chris Dalton4da70192018-06-18 09:51:36 -060041}
42
Chris Dalton8f8bf882018-07-18 10:55:51 -060043namespace {
44
45// Produces a key that accounts both for a shape's path geometry, as well as any stroke/style.
46class WriteStyledKey {
47public:
Chris Dalton02e6efe2018-08-31 14:45:19 +000048 WriteStyledKey(const GrShape& shape)
49 : fShapeUnstyledKeyCount(shape.unstyledKeySize())
50 , fStyleKeyCount(
51 GrStyle::KeySize(shape.style(), GrStyle::Apply::kPathEffectAndStrokeRec)) {}
Chris Dalton8f8bf882018-07-18 10:55:51 -060052
53 // Returns the total number of uint32_t's to allocate for the key.
Chris Dalton02e6efe2018-08-31 14:45:19 +000054 int allocCountU32() const { return 2 + fShapeUnstyledKeyCount + fStyleKeyCount; }
Chris Dalton8f8bf882018-07-18 10:55:51 -060055
56 // Writes the key to out[].
57 void write(const GrShape& shape, uint32_t* out) {
Chris Dalton02e6efe2018-08-31 14:45:19 +000058 // How many bytes remain in the key, beginning on out[1]?
59 out[0] = (1 + fShapeUnstyledKeyCount + fStyleKeyCount) * sizeof(uint32_t);
60 out[1] = fStyleKeyCount;
61 shape.writeUnstyledKey(&out[2]);
62 GrStyle::WriteKey(&out[2 + fShapeUnstyledKeyCount], shape.style(),
63 GrStyle::Apply::kPathEffectAndStrokeRec, 1);
Chris Dalton8f8bf882018-07-18 10:55:51 -060064 }
65
66private:
67 int fShapeUnstyledKeyCount;
Chris Dalton02e6efe2018-08-31 14:45:19 +000068 int fStyleKeyCount;
Chris Dalton8f8bf882018-07-18 10:55:51 -060069};
70
71}
72
// Creates a new cache entry for (m, shape). The entry and its variable-length styled key are
// carved out of a single ::operator new allocation, with the key written immediately after the
// GrCCPathCacheEntry object.
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    // One allocation holds both the entry and its trailing key footer.
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}
86
87inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
Chris Dalton8f8bf882018-07-18 10:55:51 -060088 return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
Chris Dalton4da70192018-06-18 09:51:36 -060089}
90
// Recovers an entry's hash key, which lives immediately after the GrCCPathCacheEntry object in
// the same allocation (see the HashNode constructor).
inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}
95
inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    // fData[0] holds the key's byte length; the hashed payload begins at fData[1].
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}
99
// Destroying a HashNode finalizes its entry's eviction from the cache: unlink from the LRU
// list, clear the entry's back-pointer to the cache, and drop the node's ref.
GrCCPathCache::HashNode::~HashNode() {
    // No entry to clean up (NOTE(review): presumably the moved-from state -- the move
    // constructor is not visible in this file).
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}
111
// Move assignment via destroy-then-placement-new: tearing down the current node first runs the
// eviction finalization in ~HashNode(), then the node is reconstructed in place from 'node'.
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}
116
// Looks up (and optionally creates) a cache entry for the given shape and mask transform.
// Returns null if the shape has no stable geometry key, or if no entry exists and
// createIfAbsent is kNo. On a hash hit with an incompatible transform, the entry is either
// recycled in place (when it is unique and creation is allowed) or evicted and replaced.
// The returned entry is always moved to the head of the LRU list.
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    // Shapes without a stable unstyled key (e.g. ones that can't be keyed) are uncacheable.
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    // Build the styled lookup key on the stack (heap-allocating only if it is unusually large).
    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            // Incompatible transform and we can't recycle: drop the stale entry.
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}
161
// Removes 'entry' from the hash table; the HashNode destructor then unlinks it from the LRU
// list, clears its cache back-pointer, and drops the ref.
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}
170
Chris Dalton907102e2018-06-29 13:18:53 -0600171
GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    // Release our pixels in any cached atlas before the entry goes away.
    this->invalidateAtlas();
}
178
// Points this entry at a mask stashed in an atlas identified by 'atlasKey'. All incoming bounds
// are stored with 'maskShift' subtracted out, i.e. relative to the mask's own origin; the shift
// is folded into fAtlasOffset instead.
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    // Translate the bounds so they are relative to the mask's origin.
    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}
198
// Re-points this entry at a cached atlas: records the new key/offset, takes shared ownership of
// the atlas's bookkeeping info, and charges this entry's pixel area to that atlas.
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    // Account for this path's footprint in the shared atlas pixel count.
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}
215
// Disassociates this entry from its cached atlas. Marks the entry's pixels invalid in the
// shared atlas info; once at least half of the atlas's path pixels are invalidated, posts a
// message to purge the whole atlas texture from the resource cache (at most once per atlas).
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}
232
Chris Dalton4da70192018-06-18 09:51:36 -0600233void GrCCPathCacheEntry::onChange() {
234 // Our corresponding path was modified or deleted. Evict ourselves.
235 if (fCacheWeakPtr) {
236 fCacheWeakPtr->evict(this);
237 }
238}