/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"
#include "ccpr/GrCCPathParser.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

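// Stores the 2x2 linear portion of the draw matrix. Outside of Android framework builds, the
// translation is further split into an integer part (returned to the caller via 'shift') and a
// subpixel remainder that is kept for fuzzy matching against future draws.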
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
}

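// Two mask transforms match if their 2x2 matrices are identical and (when tracked) their
// subpixel translations differ by no more than 1/256 of a pixel.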
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

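// Allocates a new GrCCPathCacheEntry with the shape's unstyled key appended as a variable-length
// footer: keyData[0] holds the key length in uint32_t's and the key itself follows.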
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    int keyLength = 1 + shape.unstyledKeySize();
    void* mem = ::operator new (sizeof(GrCCPathCacheEntry) + keyLength * sizeof(uint32_t));
    fEntry = new (mem) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)mem + sizeof(GrCCPathCacheEntry));
    keyData[0] = keyLength - 1;
    shape.writeUnstyledKey(&keyData[1]);
}

inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] &&
           !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0] * sizeof(uint32_t));
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

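// Move assignment destroys the existing node first (running its eviction bookkeeping), then
// placement-new move-constructs over it, so ~HashNode() runs exactly once per slot.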
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

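// Looks up the cache entry for 'shape' under mask transform 'm'. On a hit whose transform is not
// fuzzy-equal, the entry is recycled if it is uniquely owned, or evicted otherwise; new entries
// are only created when 'createIfAbsent' allows it.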
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    int keyLength = 1 + shape.unstyledKeySize();
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(keyLength);
    keyData[0] = keyLength - 1;
    shape.writeUnstyledKey(&keyData[1]);

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

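// Points this entry at its mask in the atlas stashed from the previous flush. The integer mask
// shift is folded into the atlas offset, and the cached device-space bounds are translated into
// the shifted coordinate space.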
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

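// Marks this entry's pixels in the cached atlas as stale. Once at least half of an atlas's path
// pixels have been invalidated, the entire atlas texture is purged from the resource cache via a
// GrUniqueKeyInvalidatedMessage.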
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}