/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"
#include "ccpr/GrCCPathParser.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

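// A MaskTransform records the matrix state a path mask was rendered with: the 2x2 scale/skew
// terms plus the fractional (subpixel) part of the translation. The integer part of the
// translation is returned in "shift" and gets folded into the atlas offset instead. (The Android
// framework build compiles out the subpixel tracking.)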
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
}

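// Returns true if two MaskTransforms are close enough that a mask rendered with one can be
// reused with the other: the 2x2 components must match exactly, and the subpixel translations
// must agree to within 1/256 of a pixel.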
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

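// Builds a new entry in a single heap allocation: the GrCCPathCacheEntry itself, immediately
// followed by the shape's variable-length unstyled key. keyData[0] holds the key length in
// uint32_t's, so the key can be recovered from the entry pointer alone (see GetKey below).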
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    int keyLength = 1 + shape.unstyledKeySize();
    void* mem = ::operator new (sizeof(GrCCPathCacheEntry) + keyLength * sizeof(uint32_t));
    fEntry = new (mem) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)mem + sizeof(GrCCPathCacheEntry));
    keyData[0] = keyLength - 1;
    shape.writeUnstyledKey(&keyData[1]);
}

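// Two hash keys are equal if they store the same length prefix and their key data match bytewise.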
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] &&
           !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0] * sizeof(uint32_t));
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

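// Hashes the variable-length key data; fData[0] is the number of uint32_t's that follow it.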
inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

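// Destroying a node finalizes its entry's eviction: it unlinks the entry from the LRU list,
// invalidates the entry's pixels in any cached atlas texture, and drops the table's ref.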
GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;

    if (GrCCAtlas::CachedAtlasInfo* info = fEntry->fCachedAtlasInfo.get()) {
        // Mark our own pixels invalid in the cached atlas texture now that we have been evicted.
        info->fNumInvalidatedPathPixels += fEntry->height() * fEntry->width();
        if (!info->fIsPurgedFromResourceCache &&
            info->fNumInvalidatedPathPixels >= info->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fEntry->fAtlasKey));
            info->fIsPurgedFromResourceCache = true;
        }
    }

    fEntry->unref();
}

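// Move assignment destroys the existing node first (finalizing any pending eviction), then
// reconstructs in place via placement new so the moved-from node's entry is taken over directly.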
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

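// Looks up a cache entry for the given shape, rebuilding the unstyled key on the stack. A hit
// whose mask transform is incompatible evicts the stale entry; a miss may create a new entry
// (evicting the LRU tail if the cache is full). Either way, the returned entry moves to the
// head of the LRU list.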
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    int keyLength = 1 + shape.unstyledKeySize();
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(keyLength);
    keyData[0] = keyLength - 1;
    shape.writeUnstyledKey(&keyData[1]);

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;
        } else {
            this->evict(entry);  // The path was reused with an incompatible matrix.
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

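// Removes the entry from the hash table; ~HashNode() takes care of unlinking it from the LRU
// list and invalidating its pixels in any cached atlas.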
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

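// Records the location of a mask that was stashed in the atlas from the previous flush. The
// device bounds are translated by -maskShift so they are relative to the entry's integer-shifted
// coordinate space, while the shift itself is folded into the atlas offset.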
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

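// Points the entry at a permanent, cached atlas texture and registers its pixel count with the
// shared CachedAtlasInfo, which tracks how many of the atlas's pixels later become invalidated.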
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}