only notify bitmaps that have been added to the cache

old code (notify on every genID change):
- purgeSharedID calls=2677 hit-rate=3.51139%

new code (notify only when the pixelref was actually added to the cache):
- purgeSharedID calls=94 hit-rate=97.8723%

BUG=skia:

Review URL: https://codereview.chromium.org/960563002
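
Minimal sketch of the pattern this patch introduces, using simplified stand-in
names (PixelData, notifyGenIDChanged) rather than the real Skia types; the flag
is set whenever a cache entry is keyed by the pixelref and consumed when the
generation ID goes stale:

    #include <atomic>
    #include <cstdio>

    class PixelData {
    public:
        // Called by the cache whenever this object becomes part of a cache key.
        void notifyAddedToCache() { fAddedToCache.store(true); }

        // Called when the pixels change or the object is destroyed: only send
        // the stale-genID purge notification if we were actually cached.
        void notifyGenIDChanged() {
            if (fAddedToCache.exchange(false)) {
                std::printf("purge cache entries keyed by this generation ID\n");
            }
        }

    private:
        std::atomic<bool> fAddedToCache{false};
    };

With this in place, purgeSharedID only runs for generation IDs that can actually
appear in the cache, which is where the hit-rate improvement above comes from.
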
diff --git a/include/core/SkPixelRef.h b/include/core/SkPixelRef.h
index 5027a35..01303e5 100644
--- a/include/core/SkPixelRef.h
+++ b/include/core/SkPixelRef.h
@@ -245,6 +245,12 @@
     // Takes ownership of listener.
     void addGenIDChangeListener(GenIDChangeListener* listener);
 
+    // Call this when the pixelref becomes part of the key to a resourcecache entry. This allows the cache
+    // to automatically know that those entries can be purged when this pixelref is changed or deleted.
+    void notifyAddedToCache() {
+        fAddedToCache.store(true);
+    }
+
 protected:
     /**
      *  On success, returns true and fills out the LockRec for the pixels. On
@@ -315,6 +321,7 @@
 
     mutable SkAtomic<uint32_t> fGenerationID;
     mutable SkAtomic<bool>     fUniqueGenerationID;
+    SkAtomic<bool>             fAddedToCache;
 #ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
     const uint32_t fStableID;
 #endif
diff --git a/src/core/SkBitmapCache.cpp b/src/core/SkBitmapCache.cpp
index c411a1b..f569db8 100644
--- a/src/core/SkBitmapCache.cpp
+++ b/src/core/SkBitmapCache.cpp
@@ -8,6 +8,7 @@
 #include "SkBitmapCache.h"
 #include "SkResourceCache.h"
 #include "SkMipMap.h"
+#include "SkPixelRef.h"
 #include "SkRect.h"
 
 /**
@@ -112,6 +113,7 @@
     BitmapRec* rec = SkNEW_ARGS(BitmapRec, (src.getGenerationID(), invScaleX, invScaleY,
                                             get_bounds_from_bitmap(src), result));
     CHECK_LOCAL(localCache, add, Add, rec);
+    src.pixelRef()->notifyAddedToCache();
 }
 
 bool SkBitmapCache::Find(uint32_t genID, const SkIRect& subset, SkBitmap* result,
@@ -121,7 +123,7 @@
     return CHECK_LOCAL(localCache, find, Find, key, BitmapRec::Finder, result);
 }
 
-bool SkBitmapCache::Add(uint32_t genID, const SkIRect& subset, const SkBitmap& result,
+bool SkBitmapCache::Add(SkPixelRef* pr, const SkIRect& subset, const SkBitmap& result,
                         SkResourceCache* localCache) {
     SkASSERT(result.isImmutable());
 
@@ -132,9 +134,10 @@
         || result.height() != subset.height()) {
         return false;
     } else {
-        BitmapRec* rec = SkNEW_ARGS(BitmapRec, (genID, SK_Scalar1, SK_Scalar1, subset, result));
+        BitmapRec* rec = SkNEW_ARGS(BitmapRec, (pr->getGenerationID(), 1, 1, subset, result));
 
         CHECK_LOCAL(localCache, add, Add, rec);
+        pr->notifyAddedToCache();
         return true;
     }
 }
@@ -211,6 +214,7 @@
     if (mipmap) {
         MipMapRec* rec = SkNEW_ARGS(MipMapRec, (src, mipmap));
         CHECK_LOCAL(localCache, add, Add, rec);
+        src.pixelRef()->notifyAddedToCache();
     }
     return mipmap;
 }
diff --git a/src/core/SkBitmapCache.h b/src/core/SkBitmapCache.h
index c54f961..de50aab 100644
--- a/src/core/SkBitmapCache.h
+++ b/src/core/SkBitmapCache.h
@@ -50,7 +50,7 @@
      * The width and the height of the provided subset must be the same as the result bitmap ones.
      * result must be marked isImmutable()
      */
-    static bool Add(uint32_t genID, const SkIRect& subset, const SkBitmap& result,
+    static bool Add(SkPixelRef*, const SkIRect& subset, const SkBitmap& result,
                     SkResourceCache* localCache = NULL);
 };
 
diff --git a/src/core/SkPixelRef.cpp b/src/core/SkPixelRef.cpp
index 7aed66c..560748c 100644
--- a/src/core/SkPixelRef.cpp
+++ b/src/core/SkPixelRef.cpp
@@ -100,6 +100,7 @@
     this->needsNewGenID();
     fIsImmutable = false;
     fPreLocked = false;
+    fAddedToCache.store(false);
 }
 
 
@@ -115,6 +116,7 @@
     this->needsNewGenID();
     fIsImmutable = false;
     fPreLocked = false;
+    fAddedToCache.store(false);
 }
 
 SkPixelRef::~SkPixelRef() {
@@ -225,10 +227,11 @@
             fGenIDChangeListeners[i]->onChange();
         }
 
-        // If we can flag the pixelref somehow whenever it was actually added to the cache,
-        // perhaps it would be nice to only call this notifier in that case. For now we always
-        // call it, since we don't know if it was cached or not.
-        SkNotifyBitmapGenIDIsStale(this->getGenerationID());
+        // TODO: SkAtomic could add "old_value = atomic.xchg(new_value)" to make this clearer.
+        if (fAddedToCache.load()) {
+            SkNotifyBitmapGenIDIsStale(this->getGenerationID());
+            fAddedToCache.store(false);
+        }
     }
     // Listeners get at most one shot, so whether these triggered or not, blow them away.
     fGenIDChangeListeners.deleteAll();
diff --git a/src/core/SkResourceCache.cpp b/src/core/SkResourceCache.cpp
index 9da90c4..43e752b 100644
--- a/src/core/SkResourceCache.cpp
+++ b/src/core/SkResourceCache.cpp
@@ -305,11 +305,22 @@
     }
 }
 
+//#define SK_TRACK_PURGE_SHAREDID_HITRATE
+
+#ifdef SK_TRACK_PURGE_SHAREDID_HITRATE
+static int gPurgeCallCounter;
+static int gPurgeHitCounter;
+#endif
+
 void SkResourceCache::purgeSharedID(uint64_t sharedID) {
     if (0 == sharedID) {
         return;
     }
 
+#ifdef SK_TRACK_PURGE_SHAREDID_HITRATE
+    gPurgeCallCounter += 1;
+    bool found = false;
+#endif
     // go backwards, just like purgeAsNeeded, just to make the code similar.
     // could iterate either direction and still be correct.
     Rec* rec = fTail;
@@ -318,9 +329,21 @@
         if (rec->getKey().getSharedID() == sharedID) {
 //            SkDebugf("purgeSharedID id=%llx rec=%p\n", sharedID, rec);
             this->remove(rec);
+#ifdef SK_TRACK_PURGE_SHAREDID_HITRATE
+            found = true;
+#endif
         }
         rec = prev;
     }
+
+#ifdef SK_TRACK_PURGE_SHAREDID_HITRATE
+    if (found) {
+        gPurgeHitCounter += 1;
+    }
+
+    SkDebugf("PurgeShared calls=%d hits=%d rate=%g\n", gPurgeCallCounter, gPurgeHitCounter,
+             gPurgeHitCounter * 100.0 / gPurgeCallCounter);
+#endif
 }
 
 size_t SkResourceCache::setTotalByteLimit(size_t newLimit) {
diff --git a/src/gpu/SkGrPixelRef.cpp b/src/gpu/SkGrPixelRef.cpp
index b0e8909..01444af 100644
--- a/src/gpu/SkGrPixelRef.cpp
+++ b/src/gpu/SkGrPixelRef.cpp
@@ -189,7 +189,7 @@
         // If we are here, pixels were read correctly from the surface.
         cachedBitmap.setImmutable();
         //Add to the cache
-        SkBitmapCache::Add(this->getGenerationID(), bounds, cachedBitmap);
+        SkBitmapCache::Add(this, bounds, cachedBitmap);
 
         dst->swap(cachedBitmap);
     }
diff --git a/src/lazy/SkCachingPixelRef.cpp b/src/lazy/SkCachingPixelRef.cpp
index 570fc6f..dc53a5d 100644
--- a/src/lazy/SkCachingPixelRef.cpp
+++ b/src/lazy/SkCachingPixelRef.cpp
@@ -63,8 +63,7 @@
                 return false;
         }
         fLockedBitmap.setImmutable();
-        SkBitmapCache::Add(
-                this->getGenerationID(), info.bounds(), fLockedBitmap);
+        SkBitmapCache::Add(this, info.bounds(), fLockedBitmap);
     }
 
     // Now bitmap should contain a concrete PixelRef of the decoded image.
diff --git a/tests/SkResourceCacheTest.cpp b/tests/SkResourceCacheTest.cpp
index 626c837..fae0816 100644
--- a/tests/SkResourceCacheTest.cpp
+++ b/tests/SkResourceCacheTest.cpp
@@ -108,20 +108,22 @@
 
     SkBitmap bm;
     SkIRect rect = SkIRect::MakeWH(5, 5);
+    uint32_t cachedID = cachedBitmap.getGenerationID();
+    SkPixelRef* cachedPR = cachedBitmap.pixelRef();
 
     // Wrong subset size
-    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedBitmap.getGenerationID(), SkIRect::MakeWH(4, 6), cachedBitmap, cache));
-    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
+    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedPR, SkIRect::MakeWH(4, 6), cachedBitmap, cache));
+    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));
     // Wrong offset value
-    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedBitmap.getGenerationID(), SkIRect::MakeXYWH(-1, 0, 5, 5), cachedBitmap, cache));
-    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
+    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedPR, SkIRect::MakeXYWH(-1, 0, 5, 5), cachedBitmap, cache));
+    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));
 
     // Should not be in the cache
-    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
+    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));
 
-    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.getGenerationID(), rect, cachedBitmap, cache));
+    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedPR, rect, cachedBitmap, cache));
     // Should be in the cache, we just added it
-    REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
+    REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedID, rect, &bm, cache));
 }
 
 #include "SkMipMap.h"
@@ -215,7 +217,7 @@
         src[i].setImmutable();
         dst[i].allocN32Pixels(5, 5);
         dst[i].setImmutable();
-        SkBitmapCache::Add(src[i].getGenerationID(), subset, dst[i], cache);
+        SkBitmapCache::Add(src[i].pixelRef(), subset, dst[i], cache);
     }
 
     for (int i = 0; i < N; ++i) {
@@ -255,7 +257,7 @@
     SkIRect rect = SkIRect::MakeWH(5, 5);
 
     // Add a bitmap to the cache.
-    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.getGenerationID(), rect, cachedBitmap, cache));
+    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.pixelRef(), rect, cachedBitmap, cache));
     REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
 
     // Finding more than once works fine.
@@ -277,7 +279,7 @@
     cachedBitmap.unlockPixels();
 
     // We can add the bitmap back to the cache and find it again.
-    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.getGenerationID(), rect, cachedBitmap, cache));
+    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.pixelRef(), rect, cachedBitmap, cache));
     REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm, cache));
 
     test_mipmapcache(reporter, cache);