Revert "ccpr: Support caching of paths that span multiple tiles"

This reverts commit 6a3dc8be46728ce2042d363f89ed689a2236a37a.

Reason for revert: (not filled in before landing — TODO: record why the multi-tile path caching change is being reverted, e.g. bot breakage or regression; see linked bug skia:8462)

Original change's description:
> ccpr: Support caching of paths that span multiple tiles
> 
> Adds an accumulative "hit rect" for each cache entry that tracks the
> region of the path that has been drawn during its lifetime. Now, a
> path mask can be cached once the "hit rect" covers 50% of the path.
> This allows us to cache a path that spans multiple tiles.
> 
> To guard against unnecessarily caching gigantic path masks, we also
> require that 10% of the path be visible during the draw when it is
> cached.
> 
> Bug: skia:8462
> Change-Id: Iab2c277102b7a774eaa909c9663211694554c5a5
> Reviewed-on: https://skia-review.googlesource.com/c/180700
> Commit-Queue: Chris Dalton <csmartdalton@google.com>
> Reviewed-by: Robert Phillips <robertphillips@google.com>

TBR=bsalomon@google.com,robertphillips@google.com,brianosman@google.com,csmartdalton@google.com

Change-Id: Ibae9d46333e3178856fd623f26317366102dd344
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: skia:8462
Reviewed-on: https://skia-review.googlesource.com/c/181982
Reviewed-by: Chris Dalton <csmartdalton@google.com>
Commit-Queue: Chris Dalton <csmartdalton@google.com>
diff --git a/src/gpu/ccpr/GrCCPathCache.cpp b/src/gpu/ccpr/GrCCPathCache.cpp
index 1342431..c3f6498 100644
--- a/src/gpu/ccpr/GrCCPathCache.cpp
+++ b/src/gpu/ccpr/GrCCPathCache.cpp
@@ -158,9 +158,9 @@
 
 }
 
-GrCCPathCache::OnFlushEntryRef GrCCPathCache::find(
-        GrOnFlushResourceProvider* onFlushRP, const GrShape& shape,
-        const SkIRect& clippedDrawBounds, const SkMatrix& viewMatrix, SkIVector* maskShift) {
+GrCCPathCache::OnFlushEntryRef GrCCPathCache::find(GrOnFlushResourceProvider* onFlushRP,
+                                                   const GrShape& shape, const MaskTransform& m,
+                                                   CreateIfAbsent createIfAbsent) {
     if (!shape.hasUnstyledKey()) {
         return OnFlushEntryRef();
     }
@@ -174,7 +174,6 @@
     fScratchKey->resetDataCountU32(writeKeyHelper.allocCountU32());
     writeKeyHelper.write(shape, fScratchKey->data());
 
-    MaskTransform m(viewMatrix, maskShift);
     GrCCPathCacheEntry* entry = nullptr;
     if (HashNode* node = fHashTable.find(*fScratchKey)) {
         entry = node->entry();
@@ -182,12 +181,11 @@
 
         if (!fuzzy_equals(m, entry->fMaskTransform)) {
             // The path was reused with an incompatible matrix.
-            if (entry->unique()) {
+            if (CreateIfAbsent::kYes == createIfAbsent && entry->unique()) {
                 // This entry is unique: recycle it instead of deleting and malloc-ing a new one.
                 SkASSERT(0 == entry->fOnFlushRefCnt);  // Because we are unique.
                 entry->fMaskTransform = m;
                 entry->fHitCount = 0;
-                entry->fHitRect = SkIRect::MakeEmpty();
                 entry->releaseCachedAtlas(this);
             } else {
                 this->evict(*fScratchKey);
@@ -197,6 +195,9 @@
     }
 
     if (!entry) {
+        if (CreateIfAbsent::kNo == createIfAbsent) {
+            return OnFlushEntryRef();
+        }
         if (fHashTable.count() >= kMaxCacheCount) {
             SkDEBUGCODE(HashNode* node = fHashTable.find(*fLRU.tail()->fCacheKey));
             SkASSERT(node && node->entry() == fLRU.tail());
@@ -240,7 +241,6 @@
             }
         }
     }
-    entry->fHitRect.join(clippedDrawBounds.makeOffset(-maskShift->x(), -maskShift->y()));
     SkASSERT(!entry->fCachedAtlas || entry->fCachedAtlas->getOnFlushProxy());
     return OnFlushEntryRef::OnFlushRef(entry);
 }