/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;

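// A MaskTransform is a path's matrix with the whole-pixel translation factored out: the 2x2
// linear portion is stored exactly, the fractional translation is kept for fuzzy matching, and
// the whole-pixel part is returned via 'shift'. (Android framework builds skip the translation
// handling and match masks on the 2x2 matrix alone.)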
GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f floor = translate.floor();
    (translate - floor).store(fSubpixelTranslate);
    shift->set((int)floor[0], (int)floor[1]);
    SkASSERT((float)shift->fX == floor[0]);
    SkASSERT((float)shift->fY == floor[1]);
#endif
}

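// Returns true if the two MaskTransforms are close enough that a mask rasterized with one can
// be reused with the other: the 2x2 portions must match exactly, and the subpixel translations
// must agree to within 1/256 of a pixel.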
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

namespace {

// Produces a key that accounts both for a shape's path geometry, as well as any stroke/style.
class WriteStyledKey {
public:
    WriteStyledKey(const GrShape& shape)
            : fShapeUnstyledKeyCount(shape.unstyledKeySize())
            , fStyleKeyCount(
                      GrStyle::KeySize(shape.style(), GrStyle::Apply::kPathEffectAndStrokeRec)) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return 2 + fShapeUnstyledKeyCount + fStyleKeyCount; }

    // Writes the key to out[].
    void write(const GrShape& shape, uint32_t* out) {
        // How many bytes remain in the key, beginning on out[1]?
        out[0] = (1 + fShapeUnstyledKeyCount + fStyleKeyCount) * sizeof(uint32_t);
        out[1] = fStyleKeyCount;
        shape.writeUnstyledKey(&out[2]);
        GrStyle::WriteKey(&out[2 + fShapeUnstyledKeyCount], shape.style(),
                          GrStyle::Apply::kPathEffectAndStrokeRec, 1);
    }

private:
    int fShapeUnstyledKeyCount;
    int fStyleKeyCount;
};

}  // namespace

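// Constructs a HashNode for 'shape'. The cache entry and its variable-length styled key are
// created in one heap allocation, with the key written immediately after the entry.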
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* cache, const MaskTransform& m,
                                         const GrShape& shape) {
    SkASSERT(shape.hasUnstyledKey());

    WriteStyledKey writeKey(shape);
    void* memory = ::operator new (sizeof(GrCCPathCacheEntry) +
                                   writeKey.allocCountU32() * sizeof(uint32_t));
    fEntry = new (memory) GrCCPathCacheEntry(cache, m);

    // The shape key is a variable-length footer to the entry allocation.
    uint32_t* keyData = (uint32_t*)((char*)memory + sizeof(GrCCPathCacheEntry));
    writeKey.write(shape, keyData);
}

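// A key's first word (fData[0]) holds the length, in bytes, of the remaining key data.
// Equality therefore compares the length words, then memcmps the rest in a single call.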
inline bool operator==(const GrCCPathCache::HashKey& key1, const GrCCPathCache::HashKey& key2) {
    return key1.fData[0] == key2.fData[0] && !memcmp(&key1.fData[1], &key2.fData[1], key1.fData[0]);
}

inline GrCCPathCache::HashKey GrCCPathCache::HashNode::GetKey(const GrCCPathCacheEntry* entry) {
    // The shape key is a variable-length footer to the entry allocation.
    return HashKey{(const uint32_t*)((const char*)entry + sizeof(GrCCPathCacheEntry))};
}

inline uint32_t GrCCPathCache::HashNode::Hash(HashKey key) {
    return GrResourceKeyHash(&key.fData[1], key.fData[0]);
}

GrCCPathCache::HashNode::~HashNode() {
    if (!fEntry) {
        return;
    }

    // Finalize our eviction from the path cache.
    SkASSERT(fEntry->fCacheWeakPtr);
    fEntry->fCacheWeakPtr->fLRU.remove(fEntry);
    fEntry->fCacheWeakPtr = nullptr;
    fEntry->unref();
}

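// Move assignment: tear down the existing node (finalizing any eviction), then reconstruct
// in place from 'node'.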
GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->~HashNode();
    return *new (this) HashNode(std::move(node));
}

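// Finds the cache entry for the given shape and mask transform, optionally creating it if
// absent. On a hit with an incompatible transform, the entry is either recycled (when we hold
// the only reference) or evicted. Returned entries are moved to the head of the LRU list.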
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteStyledKey writeKey(shape);
    SkAutoSTMalloc<GrShape::kMaxKeyFromDataVerbCnt * 4, uint32_t> keyData(writeKey.allocCountU32());
    writeKey.write(shape, keyData.get());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find({keyData.get()})) {
        entry = node->entry();
        SkASSERT(this == entry->fCacheWeakPtr);
        if (fuzzy_equals(m, entry->fMaskTransform)) {
            ++entry->fHitCount;  // The path was reused with a compatible matrix.
        } else if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
            // This entry is unique: we can recycle it instead of deleting and malloc-ing a new one.
            entry->fMaskTransform = m;
            entry->fHitCount = 1;
            entry->invalidateAtlas();
            SkASSERT(!entry->fCurrFlushAtlas);  // Should be null because 'entry' is unique.
        } else {
            this->evict(entry);
            entry = nullptr;
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            this->evict(fLRU.tail());  // We've exceeded our limit.
        }
        entry = fHashTable.set(HashNode(this, m, shape))->entry();
        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry);  // Will be re-added at head.
    }

    fLRU.addToHead(entry);
    return sk_ref_sp(entry);
}

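// Removes the entry from the hash table and LRU list. The entry itself stays alive until its
// outstanding refs are released.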
void GrCCPathCache::evict(const GrCCPathCacheEntry* entry) {
    SkASSERT(entry);
    SkASSERT(this == entry->fCacheWeakPtr);
    SkASSERT(fLRU.isInList(entry));
    SkASSERT(fHashTable.find(HashNode::GetKey(entry))->entry() == entry);

    fHashTable.remove(HashNode::GetKey(entry));  // ~HashNode() handles the rest.
}

GrCCPathCacheEntry::~GrCCPathCacheEntry() {
    SkASSERT(!fCacheWeakPtr);  // HashNode should have cleared our cache pointer.
    SkASSERT(!fCurrFlushAtlas);  // Client is required to reset fCurrFlushAtlas back to null.

    this->invalidateAtlas();
}

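// Records the key and offset of this entry's mask within a stashed atlas texture. The
// device-space bounds are stored with the whole-pixel translation ('maskShift') factored out,
// so the mask can be matched when the path recurs under a shifted matrix.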
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

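// Points this entry at a mask in a permanently cached atlas texture, and adds our pixel count
// to the atlas's CachedAtlasInfo bookkeeping.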
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.

    fContextUniqueID = contextUniqueID;

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo);  // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

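// Releases this entry's claim on its cached atlas pixels, if any. Once at least half of an
// atlas's path pixels have been invalidated, the entire atlas texture is purged from the
// resource cache.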
void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}

void GrCCPathCacheEntry::onChange() {
    // Our corresponding path was modified or deleted. Evict ourselves.
    if (fCacheWeakPtr) {
        fCacheWeakPtr->evict(this);
    }
}