/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCPathCache.h"

#include "GrShape.h"
#include "SkNx.h"

static constexpr int kMaxKeyDataCountU32 = 256; // 1kB of uint32_t's.

DECLARE_SKMESSAGEBUS_MESSAGE(sk_sp<GrCCPathCache::Key>);

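// Returns a unique ID for a new path cache. Skips over SK_InvalidUniqueID so the returned
// value is always valid, even if the global counter wraps around.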
static inline uint32_t next_path_cache_id() {
    static std::atomic<uint32_t> gNextID(1);
    for (;;) {
        uint32_t id = gNextID.fetch_add(+1, std::memory_order_acquire);
        if (SK_InvalidUniqueID != id) {
            return id;
        }
    }
}

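// Called by SkMessageBus to filter invalidation messages: a posted key is only delivered to
// the inbox of the GrCCPathCache that created it.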
static inline bool SkShouldPostMessageToBus(
        const sk_sp<GrCCPathCache::Key>& key, uint32_t msgBusUniqueID) {
    return key->pathCacheUniqueID() == msgBusUniqueID;
}

// The maximum number of cache entries we allow in our own cache.
static constexpr int kMaxCacheCount = 1 << 16;


GrCCPathCache::MaskTransform::MaskTransform(const SkMatrix& m, SkIVector* shift)
        : fMatrix2x2{m.getScaleX(), m.getSkewX(), m.getSkewY(), m.getScaleY()} {
    SkASSERT(!m.hasPerspective());
    Sk2f translate = Sk2f(m.getTranslateX(), m.getTranslateY());
    Sk2f transFloor;
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    // On the Android framework we pre-round view matrix translates to integers for better caching.
    transFloor = translate;
#else
    transFloor = translate.floor();
    (translate - transFloor).store(fSubpixelTranslate);
#endif
    shift->set((int)transFloor[0], (int)transFloor[1]);
    SkASSERT((float)shift->fX == transFloor[0]); // Make sure transFloor had integer values.
    SkASSERT((float)shift->fY == transFloor[1]);
}

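// Returns true if 'a' and 'b' are close enough that a mask rendered under transform 'a' can be
// reused under transform 'b': the 2x2 linear parts must match exactly, and (when subpixel
// translates are tracked) the translates must agree to within 1/256 of a pixel.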
inline static bool fuzzy_equals(const GrCCPathCache::MaskTransform& a,
                                const GrCCPathCache::MaskTransform& b) {
    if ((Sk4f::Load(a.fMatrix2x2) != Sk4f::Load(b.fMatrix2x2)).anyTrue()) {
        return false;
    }
#ifndef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (((Sk2f::Load(a.fSubpixelTranslate) -
          Sk2f::Load(b.fSubpixelTranslate)).abs() > 1.f/256).anyTrue()) {
        return false;
    }
#endif
    return true;
}

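// Allocates the Key and its variable-length key data in a single allocation; the data lives as
// a footer immediately after the Key itself (see Key::data() below).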
sk_sp<GrCCPathCache::Key> GrCCPathCache::Key::Make(uint32_t pathCacheUniqueID,
                                                   int dataCountU32, const void* data) {
    void* memory = ::operator new (sizeof(Key) + dataCountU32 * sizeof(uint32_t));
    sk_sp<GrCCPathCache::Key> key(new (memory) Key(pathCacheUniqueID, dataCountU32));
    if (data) {
        memcpy(key->data(), data, key->dataSizeInBytes());
    }
    return key;
}

const uint32_t* GrCCPathCache::Key::data() const {
    // The shape key is a variable-length footer to the entry allocation.
    return reinterpret_cast<const uint32_t*>(reinterpret_cast<const char*>(this) + sizeof(Key));
}

uint32_t* GrCCPathCache::Key::data() {
    // The shape key is a variable-length footer to the entry allocation.
    return reinterpret_cast<uint32_t*>(reinterpret_cast<char*>(this) + sizeof(Key));
}

inline bool GrCCPathCache::Key::operator==(const GrCCPathCache::Key& that) const {
    return fDataSizeInBytes == that.fDataSizeInBytes &&
           !memcmp(this->data(), that.data(), fDataSizeInBytes);
}

void GrCCPathCache::Key::onChange() {
    // Our key's corresponding path was invalidated. Post a thread-safe eviction message.
    SkMessageBus<sk_sp<Key>>::Post(sk_ref_sp(this));
}

inline const GrCCPathCache::Key& GrCCPathCache::HashNode::GetKey(
        const GrCCPathCache::HashNode& node) {
    return *node.entry()->fCacheKey;
}

inline uint32_t GrCCPathCache::HashNode::Hash(const Key& key) {
    return GrResourceKeyHash(key.data(), key.dataSizeInBytes());
}

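// Creates the cache entry and registers the key as a gen-ID change listener on the shape's
// path, so Key::onChange() fires if the path's contents are later modified or deleted.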
inline GrCCPathCache::HashNode::HashNode(GrCCPathCache* pathCache, sk_sp<Key> key,
                                         const MaskTransform& m, const GrShape& shape)
        : fPathCache(pathCache)
        , fEntry(new GrCCPathCacheEntry(key, m)) {
    SkASSERT(shape.hasUnstyledKey());
    shape.addGenIDChangeListener(std::move(key));
}

inline GrCCPathCache::HashNode::~HashNode() {
    this->willExitHashTable();
}

inline GrCCPathCache::HashNode& GrCCPathCache::HashNode::operator=(HashNode&& node) {
    this->willExitHashTable();
    fPathCache = node.fPathCache;
    fEntry = std::move(node.fEntry);
    SkASSERT(!node.fEntry);
    return *this;
}

inline void GrCCPathCache::HashNode::willExitHashTable() {
    if (!fEntry) {
        return; // We were moved.
    }

    SkASSERT(fPathCache);
    SkASSERT(fPathCache->fLRU.isInList(fEntry.get()));

    fEntry->fCacheKey->markShouldUnregisterFromPath(); // Unregister the path listener.
    fPathCache->fLRU.remove(fEntry.get());
}


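// fScratchKey is preallocated at the maximum key size so find() can build candidate keys
// without a heap allocation per lookup; a permanent copy is only made on a cache miss.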
GrCCPathCache::GrCCPathCache()
        : fInvalidatedKeysInbox(next_path_cache_id())
        , fScratchKey(Key::Make(fInvalidatedKeysInbox.uniqueID(), kMaxKeyDataCountU32)) {
}

GrCCPathCache::~GrCCPathCache() {
    fHashTable.reset(); // Must be cleared first; ~HashNode calls fLRU.remove() on us.
    SkASSERT(fLRU.isEmpty()); // Ensure the hash table and LRU list were coherent.
}

namespace {

// Produces a key that accounts both for a shape's path geometry and for any stroke/style.
class WriteKeyHelper {
public:
    static constexpr int kStrokeWidthIdx = 0;
    static constexpr int kStrokeMiterIdx = 1;
    static constexpr int kStrokeCapJoinIdx = 2;
    static constexpr int kShapeUnstyledKeyIdx = 3;

    WriteKeyHelper(const GrShape& shape) : fShapeUnstyledKeyCount(shape.unstyledKeySize()) {}

    // Returns the total number of uint32_t's to allocate for the key.
    int allocCountU32() const { return kShapeUnstyledKeyIdx + fShapeUnstyledKeyCount; }

    // Writes the key data to out[].
    void write(const GrShape& shape, uint32_t* out) {
        // Stroke key.
        // We don't use GrStyle::WriteKey() because it does not account for hairlines.
        // http://skbug.com/8273
        SkASSERT(!shape.style().hasPathEffect());
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.isFillStyle()) {
            // Use a value for width that won't collide with a valid fp32 value >= 0.
            out[kStrokeWidthIdx] = ~0;
            out[kStrokeMiterIdx] = out[kStrokeCapJoinIdx] = 0;
        } else {
            float width = stroke.getWidth(), miterLimit = stroke.getMiter();
            memcpy(&out[kStrokeWidthIdx], &width, sizeof(float));
            memcpy(&out[kStrokeMiterIdx], &miterLimit, sizeof(float));
            out[kStrokeCapJoinIdx] = (stroke.getCap() << 16) | stroke.getJoin();
            GR_STATIC_ASSERT(sizeof(out[kStrokeWidthIdx]) == sizeof(float));
        }

        // Shape unstyled key.
        shape.writeUnstyledKey(&out[kShapeUnstyledKeyIdx]);
    }

private:
    int fShapeUnstyledKeyCount;
};

}  // namespace

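// Looks up (or optionally creates) the cache entry for 'shape' under mask transform 'm'. The
// lookup key is built in fScratchKey; on a hit with an incompatible transform, the entry is
// either recycled in place (if unique) or evicted. Hits move to the head of the LRU list.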
sk_sp<GrCCPathCacheEntry> GrCCPathCache::find(const GrShape& shape, const MaskTransform& m,
                                              CreateIfAbsent createIfAbsent) {
    if (!shape.hasUnstyledKey()) {
        return nullptr;
    }

    WriteKeyHelper writeKeyHelper(shape);
    if (writeKeyHelper.allocCountU32() > kMaxKeyDataCountU32) {
        return nullptr;
    }

    SkASSERT(fScratchKey->unique());
    fScratchKey->resetDataCountU32(writeKeyHelper.allocCountU32());
    writeKeyHelper.write(shape, fScratchKey->data());

    GrCCPathCacheEntry* entry = nullptr;
    if (HashNode* node = fHashTable.find(*fScratchKey)) {
        entry = node->entry();
        SkASSERT(fLRU.isInList(entry));
        if (!fuzzy_equals(m, entry->fMaskTransform)) {
            // The path was reused with an incompatible matrix.
            if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
                // This entry is unique: recycle it instead of deleting and malloc-ing a new one.
                entry->fMaskTransform = m;
                entry->fHitCount = 0;
                entry->invalidateAtlas();
                SkASSERT(!entry->fCurrFlushAtlas); // Should be null because 'entry' is unique.
            } else {
                this->evict(*fScratchKey);
                entry = nullptr;
            }
        }
    }

    if (!entry) {
        if (CreateIfAbsent::kNo == createIfAbsent) {
            return nullptr;
        }
        if (fHashTable.count() >= kMaxCacheCount) {
            SkDEBUGCODE(HashNode* node = fHashTable.find(*fLRU.tail()->fCacheKey));
            SkASSERT(node && node->entry() == fLRU.tail());
            this->evict(*fLRU.tail()->fCacheKey); // We've exceeded our limit.
        }

        // Create a new entry in the cache.
        sk_sp<Key> permanentKey = Key::Make(fInvalidatedKeysInbox.uniqueID(),
                                            writeKeyHelper.allocCountU32(), fScratchKey->data());
        SkASSERT(*permanentKey == *fScratchKey);
        SkASSERT(!fHashTable.find(*permanentKey));
        entry = fHashTable.set(HashNode(this, std::move(permanentKey), m, shape))->entry();

        SkASSERT(fHashTable.count() <= kMaxCacheCount);
    } else {
        fLRU.remove(entry); // Will be re-added at head.
    }

    SkDEBUGCODE(HashNode* node = fHashTable.find(*fScratchKey));
    SkASSERT(node && node->entry() == entry);
    fLRU.addToHead(entry);

    entry->fTimestamp = this->quickPerFlushTimestamp();
    ++entry->fHitCount;
    return sk_ref_sp(entry);
}

void GrCCPathCache::doPostFlushProcessing() {
    this->purgeInvalidatedKeys();

    // Mark the per-flush timestamp as needing to be updated with a newer clock reading.
    fPerFlushTimestamp = GrStdSteadyClock::time_point::min();
}

void GrCCPathCache::purgeEntriesOlderThan(const GrStdSteadyClock::time_point& purgeTime) {
    this->purgeInvalidatedKeys();

#ifdef SK_DEBUG
    auto lastTimestamp = (fLRU.isEmpty())
            ? GrStdSteadyClock::time_point::max()
            : fLRU.tail()->fTimestamp;
#endif

    // Drop every cache entry whose timestamp is older than purgeTime.
    while (!fLRU.isEmpty() && fLRU.tail()->fTimestamp < purgeTime) {
#ifdef SK_DEBUG
        // Verify that fLRU is sorted by timestamp.
        auto timestamp = fLRU.tail()->fTimestamp;
        SkASSERT(timestamp >= lastTimestamp);
        lastTimestamp = timestamp;
#endif
        this->evict(*fLRU.tail()->fCacheKey);
    }
}

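// Drains the thread-safe inbox of keys posted by Key::onChange() (i.e., paths that were
// modified or deleted since the last flush) and evicts their entries if still in the cache.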
void GrCCPathCache::purgeInvalidatedKeys() {
    SkTArray<sk_sp<Key>> invalidatedKeys;
    fInvalidatedKeysInbox.poll(&invalidatedKeys);
    for (const sk_sp<Key>& key : invalidatedKeys) {
        bool isInCache = !key->shouldUnregisterFromPath(); // Gets set upon exiting the cache.
        if (isInCache) {
            this->evict(*key);
        }
    }
}


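// Points this entry at a mask that lives in the atlas stashed from the previous flush. The
// device bounds are translated by -maskShift so the stored bounds are relative to the mask's
// canonical (unshifted) position, with the shift folded into fAtlasOffset instead.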
void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
                                            const SkIVector& atlasOffset, const SkRect& devBounds,
                                            const SkRect& devBounds45, const SkIRect& devIBounds,
                                            const SkIVector& maskShift) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas); // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = atlasOffset + maskShift;
    SkASSERT(!fCachedAtlasInfo); // Otherwise the caller should have reused the cached atlas.

    float dx = (float)maskShift.fX, dy = (float)maskShift.fY;
    fDevBounds = devBounds.makeOffset(-dx, -dy);
    fDevBounds45 = GrCCPathProcessor::MakeOffset45(devBounds45, -dx, -dy);
    fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
}

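// Points this entry at a mask in a permanently cached atlas, and joins the atlas's shared
// CachedAtlasInfo bookkeeping so invalidateAtlas() can track what fraction of the atlas's
// pixels are still in use.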
void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
                                             const SkIVector& newAtlasOffset,
                                             sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
    SkASSERT(atlasKey.isValid());
    SkASSERT(!fCurrFlushAtlas); // Otherwise we should reuse the atlas from last time.

    fAtlasKey = atlasKey;
    fAtlasOffset = newAtlasOffset;

    SkASSERT(!fCachedAtlasInfo); // Otherwise we need to invalidate our pixels in the old info.
    fCachedAtlasInfo = std::move(info);
    fCachedAtlasInfo->fNumPathPixels += this->height() * this->width();
}

void GrCCPathCacheEntry::invalidateAtlas() {
    if (fCachedAtlasInfo) {
        // Mark our own pixels invalid in the cached atlas texture.
        fCachedAtlasInfo->fNumInvalidatedPathPixels += this->height() * this->width();
        if (!fCachedAtlasInfo->fIsPurgedFromResourceCache &&
            fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
            // Too many invalidated pixels: purge the atlas texture from the resource cache.
            // The GrContext and CCPR path cache both share the same unique ID.
            SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fCachedAtlasInfo->fContextUniqueID));
            fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
        }
    }

    fAtlasKey.reset();
    fCachedAtlasInfo = nullptr;
}