Reland "Make GPU cache invalidation SkMessageBus messages go to one GrContext."

This is a reland of f4c5bb9aba485aa47c27b15905d81992b7cf4707

Original change's description:
> Make GPU cache invalidation SkMessageBus messages go to one GrContext.
> 
> Requires the type used as SkMessageBus's template parameter to implement:
> bool shouldSend(uint32_t inboxID) const
> 
> Updates all GPU backend message types so that they go only to the
> GrContext that is adding the cache entry.
> 
> Bug: skia:
> Change-Id: I3e8a4eb90654b7b8ac57cac9fb508c0ef1d51058
> Reviewed-on: https://skia-review.googlesource.com/140220
> Reviewed-by: Robert Phillips <robertphillips@google.com>
> Reviewed-by: Jim Van Verth <jvanverth@google.com>

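The requirement quoted above means every message type used with SkMessageBus
must now expose a shouldSend() method that the bus consults for each inbox.
A minimal sketch of such a type (illustrative names only, not part of this
patch):

    // Hypothetical message type: SkMessageBus delivers it only to the inbox
    // whose ID matches the targeted GrContext's uniqueID().
    struct HypotheticalInvalidationMsg {
        GrUniqueKey fKey;
        uint32_t    fContextUniqueID;

        bool shouldSend(uint32_t inboxID) const {
            return inboxID == fContextUniqueID;
        }
    };

Posting is unchanged, e.g. SkMessageBus<HypotheticalInvalidationMsg>::Post(msg);
the filtering happens inside the bus via shouldSend(). The GrCCPathCache hunk
below shows the real GrUniqueKeyInvalidatedMessage gaining a context unique ID
for exactly this purpose.
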
Bug: skia:
Change-Id: I8402bfe3ed0170c99936d47050458817030b473b
Reviewed-on: https://skia-review.googlesource.com/140801
Reviewed-by: Brian Salomon <bsalomon@google.com>
Commit-Queue: Brian Salomon <bsalomon@google.com>
diff --git a/src/gpu/ccpr/GrCCPathCache.cpp b/src/gpu/ccpr/GrCCPathCache.cpp
index b22b1dd..d7dc714 100644
--- a/src/gpu/ccpr/GrCCPathCache.cpp
+++ b/src/gpu/ccpr/GrCCPathCache.cpp
@@ -149,13 +149,16 @@
     this->invalidateAtlas();
 }
 
-void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey,
+void GrCCPathCacheEntry::initAsStashedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                             const SkIVector& atlasOffset, const SkRect& devBounds,
                                             const SkRect& devBounds45, const SkIRect& devIBounds,
                                             const SkIVector& maskShift) {
+    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
     SkASSERT(atlasKey.isValid());
     SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.
 
+    fContextUniqueID = contextUniqueID;
+
     fAtlasKey = atlasKey;
     fAtlasOffset = atlasOffset + maskShift;
     SkASSERT(!fCachedAtlasInfo);  // Otherwise they should have reused the cached atlas instead.
@@ -166,12 +169,15 @@
     fDevIBounds = devIBounds.makeOffset(-maskShift.fX, -maskShift.fY);
 }
 
-void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey,
+void GrCCPathCacheEntry::updateToCachedAtlas(const GrUniqueKey& atlasKey, uint32_t contextUniqueID,
                                              const SkIVector& newAtlasOffset,
                                              sk_sp<GrCCAtlas::CachedAtlasInfo> info) {
+    SkASSERT(contextUniqueID != SK_InvalidUniqueID);
     SkASSERT(atlasKey.isValid());
     SkASSERT(!fCurrFlushAtlas);  // Otherwise we should reuse the atlas from last time.
 
+    fContextUniqueID = contextUniqueID;
+
     fAtlasKey = atlasKey;
     fAtlasOffset = newAtlasOffset;
 
@@ -188,7 +194,7 @@
             fCachedAtlasInfo->fNumInvalidatedPathPixels >= fCachedAtlasInfo->fNumPathPixels / 2) {
             // Too many invalidated pixels: purge the atlas texture from the resource cache.
             SkMessageBus<GrUniqueKeyInvalidatedMessage>::Post(
-                    GrUniqueKeyInvalidatedMessage(fAtlasKey));
+                    GrUniqueKeyInvalidatedMessage(fAtlasKey, fContextUniqueID));
             fCachedAtlasInfo->fIsPurgedFromResourceCache = true;
         }
     }
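
The contextUniqueID carried by the message above is what shouldSend() compares
against each inbox's ID, so only the resource cache of the GrContext that
created the entry sees the invalidation. A rough receiver-side sketch, assuming
the inbox is registered with the owning context's uniqueID() and polled while
the cache processes pending invalidations (names and call sites here are
assumptions, not taken from this patch):

    // Sketch only: an inbox registered with this context's unique ID receives
    // just the messages whose shouldSend(inboxID) returns true for that ID.
    //   SkMessageBus<GrUniqueKeyInvalidatedMessage>::Inbox inbox(context->uniqueID());
    //   SkSTArray<4, GrUniqueKeyInvalidatedMessage> msgs;
    //   inbox.poll(&msgs);  // only messages targeted at this context arrive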