Instantiate clip atlases directly

Calls assign() directly on clip atlas proxies once the atlases are laid
out, instead of relying on the scary, unprotected back-pointer that the
lazy instantiation callback used to capture.
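
A rough sketch of the new flow, using only names that appear in this
patch (the exact call sites are in the diff below):

    // The lazy callback no longer captures "this"; it only reports failure.
    fAtlasLazyProxy = GrCCAtlas::MakeLazyAtlasProxy(
            [](GrResourceProvider*, const GrCCAtlas::LazyAtlasDesc&) {
                return GrSurfaceProxy::LazyCallbackResult();
            }, caps, GrSurfaceProxy::UseAllocator::kYes);

    // Once an atlas is laid out and its texture exists, the caller hands the
    // texture to each clip path's proxy directly:
    clipPath.assignAtlasTexture(sk_ref_sp(atlasTexture));
    // ...which forwards to GrSurfaceProxyPriv::assign():
    fAtlasLazyProxy->priv().assign(std::move(atlasTexture));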

Bug: chromium:928984
Change-Id: I253fc0f3280b97b6af4f1fde2d0094d58dc5d125
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/387296
Reviewed-by: Adlai Holler <adlai@google.com>
Reviewed-by: Robert Phillips <robertphillips@google.com>
Commit-Queue: Chris Dalton <csmartdalton@google.com>
diff --git a/src/gpu/ccpr/GrCCClipPath.cpp b/src/gpu/ccpr/GrCCClipPath.cpp
index d2845d1..96b9e55 100644
--- a/src/gpu/ccpr/GrCCClipPath.cpp
+++ b/src/gpu/ccpr/GrCCClipPath.cpp
@@ -19,28 +19,10 @@
     SkASSERT(!this->isInitialized());
 
     fAtlasLazyProxy = GrCCAtlas::MakeLazyAtlasProxy(
-            [this](GrResourceProvider* resourceProvider, const GrCCAtlas::LazyAtlasDesc& desc) {
-                SkASSERT(fHasAtlas);
-                SkASSERT(!fHasAtlasTranslate);
-
-                GrTextureProxy* textureProxy = fAtlas ? fAtlas->textureProxy() : nullptr;
-
-                if (!textureProxy || !textureProxy->instantiate(resourceProvider)) {
-                    SkDEBUGCODE(fHasAtlasTranslate = true);
-                    return GrSurfaceProxy::LazyCallbackResult();
-                }
-
-                sk_sp<GrTexture> texture = sk_ref_sp(textureProxy->peekTexture());
-                SkASSERT(texture);
-
-                SkDEBUGCODE(fHasAtlasTranslate = true);
-
-                // We use LazyInstantiationKeyMode::kUnsynced here because CCPR clip masks are never
-                // cached, and the clip FP proxies need to ignore any unique keys that atlas
-                // textures use for path mask caching.
-                return GrSurfaceProxy::LazyCallbackResult(
-                        std::move(texture), true,
-                        GrSurfaceProxy::LazyInstantiationKeyMode::kUnsynced);
+            [](GrResourceProvider*, const GrCCAtlas::LazyAtlasDesc&) {
+                // GrCCClipPaths get instantiated explicitly after the atlas is laid out. If this
+                // callback gets invoked, it means the atlas proxy itself failed to instantiate.
+                return GrSurfaceProxy::LazyCallbackResult();
             }, caps, GrSurfaceProxy::UseAllocator::kYes);
 
     fDeviceSpacePath = deviceSpacePath;
@@ -58,12 +40,13 @@
     }
 }
 
-void GrCCClipPath::renderPathInAtlas(GrCCPerFlushResources* resources,
-                                     GrOnFlushResourceProvider* onFlushRP) {
+const GrCCAtlas* GrCCClipPath::renderPathInAtlas(GrCCPerFlushResources* resources,
+                                                 GrOnFlushResourceProvider* onFlushRP) {
     SkASSERT(this->isInitialized());
     SkASSERT(!fHasAtlas);
-    fAtlas = resources->renderDeviceSpacePathInAtlas(
+    const GrCCAtlas* retiredAtlas = resources->renderDeviceSpacePathInAtlas(
             onFlushRP, fAccessRect, fDeviceSpacePath, fPathDevIBounds,
             GrFillRuleForSkPath(fDeviceSpacePath), &fDevToAtlasOffset);
     SkDEBUGCODE(fHasAtlas = true);
+    return retiredAtlas;
 }
diff --git a/src/gpu/ccpr/GrCCClipPath.h b/src/gpu/ccpr/GrCCClipPath.h
index 49e7071..6fb5768 100644
--- a/src/gpu/ccpr/GrCCClipPath.h
+++ b/src/gpu/ccpr/GrCCClipPath.h
@@ -9,6 +9,7 @@
 #define GrCCClipPath_DEFINED
 
 #include "include/core/SkPath.h"
+#include "src/gpu/GrSurfaceProxyPriv.h"
 #include "src/gpu/GrTextureProxy.h"
 #include "src/gpu/ccpr/GrCCAtlas.h"
 
@@ -27,15 +28,6 @@
     GrCCClipPath() = default;
     GrCCClipPath(const GrCCClipPath&) = delete;
 
-    ~GrCCClipPath() {
-        // Ensure no clip FP exists with a dangling pointer back into this class. This works because
-        // a clip FP will have a ref on the proxy if it exists.
-        //
-        // This assert also guarantees there won't be a lazy proxy callback with a dangling pointer
-        // back into this class, since no proxy will exist after we destruct, if the assert passes.
-        SkASSERT(!fAtlasLazyProxy || fAtlasLazyProxy->unique());
-    }
-
     bool isInitialized() const { return fAtlasLazyProxy != nullptr; }
     void init(const SkPath& deviceSpacePath,
               const SkIRect& desc,
@@ -59,23 +51,32 @@
     }
 
     void accountForOwnPath(GrCCAtlas::Specs*) const;
-    void renderPathInAtlas(GrCCPerFlushResources*, GrOnFlushResourceProvider*);
+
+    // Allocates our clip path in an atlas and records the offset.
+    //
+    // If the return value is non-null, it means the given path did not fit in the then-current
+    // atlas, so it was retired and a new one was added to the stack. The return value is the
+    // newly-retired atlas. (*NOT* the atlas this path will reside in.) The caller must call
+    // assignAtlasTexture on all prior GrCCClipPaths that will use the retired atlas.
+    const GrCCAtlas* renderPathInAtlas(GrCCPerFlushResources*, GrOnFlushResourceProvider*);
 
     const SkIVector& atlasTranslate() const {
-        SkASSERT(fHasAtlasTranslate);
+        SkASSERT(fHasAtlas);
         return fDevToAtlasOffset;
     }
 
+    void assignAtlasTexture(sk_sp<GrTexture> atlasTexture) {
+        fAtlasLazyProxy->priv().assign(std::move(atlasTexture));
+    }
+
 private:
     sk_sp<GrTextureProxy> fAtlasLazyProxy;
     SkPath fDeviceSpacePath;
     SkIRect fPathDevIBounds;
     SkIRect fAccessRect;
 
-    const GrCCAtlas* fAtlas = nullptr;
     SkIVector fDevToAtlasOffset;  // Translation from device space to location in atlas.
     SkDEBUGCODE(bool fHasAtlas = false;)
-    SkDEBUGCODE(bool fHasAtlasTranslate = false;)
 };
 
 #endif
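
The contract documented on renderPathInAtlas() above implies a
retire-and-assign loop in the caller. A simplified sketch, condensed from
the preFlush() change later in this patch:

    ClipMapsIter nextPathToAssign = it;  // Paths not yet bound to an atlas texture.
    for (; it != end; ++it) {
        if (const GrCCAtlas* retiredAtlas =
                it->renderPathInAtlas(fPerFlushResources.get(), onFlushRP)) {
            // Every path rendered since the last retirement lives in the retired atlas.
            assign_atlas_textures(retiredAtlas->textureProxy()->peekTexture(),
                                  nextPathToAssign, it);
            nextPathToAssign = it;
        }
    }
    // finalize() returns the last atlas in the stack; its texture is assigned to
    // the remaining paths.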
diff --git a/src/gpu/ccpr/GrCCPerFlushResources.cpp b/src/gpu/ccpr/GrCCPerFlushResources.cpp
index 19c3202..efbda63 100644
--- a/src/gpu/ccpr/GrCCPerFlushResources.cpp
+++ b/src/gpu/ccpr/GrCCPerFlushResources.cpp
@@ -35,25 +35,27 @@
         enableScissorInAtlas = GrScissorTest::kEnabled;
     }
 
-    this->placeRenderedPathInAtlas(onFlushRP, clippedPathIBounds, enableScissorInAtlas,
-                                   devToAtlasOffset);
+    const GrCCAtlas* retiredAtlas = this->placeRenderedPathInAtlas(onFlushRP, clippedPathIBounds,
+                                                                   enableScissorInAtlas,
+                                                                   devToAtlasOffset);
 
     SkMatrix atlasMatrix = SkMatrix::Translate(devToAtlasOffset->fX, devToAtlasOffset->fY);
     this->enqueueRenderedPath(devPath, fillRule, clippedPathIBounds, atlasMatrix,
                               enableScissorInAtlas, *devToAtlasOffset);
 
-    return &fRenderedAtlasStack.current();
+    return retiredAtlas;
 }
 
-void GrCCPerFlushResources::placeRenderedPathInAtlas(
+const GrCCAtlas* GrCCPerFlushResources::placeRenderedPathInAtlas(
         GrOnFlushResourceProvider* onFlushRP, const SkIRect& clippedPathIBounds,
         GrScissorTest scissorTest, SkIVector* devToAtlasOffset) {
-    if (GrCCAtlas* retiredAtlas =
-                fRenderedAtlasStack.addRect(clippedPathIBounds, devToAtlasOffset)) {
+    GrCCAtlas* retiredAtlas = fRenderedAtlasStack.addRect(clippedPathIBounds, devToAtlasOffset);
+    if (retiredAtlas) {
         // We did not fit in the previous coverage count atlas and it was retired. Render the
         // retired atlas.
         this->flushRenderedPaths(onFlushRP, retiredAtlas);
     }
+    return retiredAtlas;
 }
 
 void GrCCPerFlushResources::enqueueRenderedPath(const SkPath& path, GrFillRule fillRule,
@@ -171,7 +173,7 @@
     }
 }
 
-bool GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP) {
+const GrCCAtlas* GrCCPerFlushResources::finalize(GrOnFlushResourceProvider* onFlushRP) {
     if (!fRenderedAtlasStack.empty()) {
         this->flushRenderedPaths(onFlushRP, &fRenderedAtlasStack.current());
     }
@@ -182,5 +184,5 @@
         SkASSERT(fAtlasPaths[i].fScissoredPaths.empty());
     }
 #endif
-    return true;
+    return &fRenderedAtlasStack.current();
 }
diff --git a/src/gpu/ccpr/GrCCPerFlushResources.h b/src/gpu/ccpr/GrCCPerFlushResources.h
index 6c67401..2d520a4 100644
--- a/src/gpu/ccpr/GrCCPerFlushResources.h
+++ b/src/gpu/ccpr/GrCCPerFlushResources.h
@@ -23,15 +23,24 @@
     GrCCPerFlushResources(GrOnFlushResourceProvider*, const GrCCAtlas::Specs&);
 
     // Renders a path into an atlas.
+    //
+    // If the return value is non-null, it means the given path did not fit in the then-current
+    // atlas, so it was retired and a new one was added to the stack. The return value is the
+    // newly-retired atlas. (*NOT* the atlas the path was just drawn into.) The caller must call
+    // assignAtlasTexture on all GrCCClipPaths that will use the retired atlas.
     const GrCCAtlas* renderDeviceSpacePathInAtlas(
             GrOnFlushResourceProvider*, const SkIRect& clipIBounds, const SkPath& devPath,
             const SkIRect& devPathIBounds, GrFillRule fillRule, SkIVector* devToAtlasOffset);
 
     // Finishes off the GPU buffers and renders the atlas(es).
-    bool finalize(GrOnFlushResourceProvider*);
+    const GrCCAtlas* finalize(GrOnFlushResourceProvider*);
 
-    // Accessors used by draw calls, once the resources have been finalized.
-    void placeRenderedPathInAtlas(
+private:
+    // If the return value is non-null, it means the given path did not fit in the then-current
+    // atlas, so it was retired and a new one was added to the stack. The return value is the
+    // newly-retired atlas. (*NOT* the atlas the path was just drawn into.) The caller must call
+    // assignAtlasTexture on all GrCCClipPaths that will use the retired atlas.
+    const GrCCAtlas* placeRenderedPathInAtlas(
             GrOnFlushResourceProvider*, const SkIRect& clippedPathIBounds, GrScissorTest,
             SkIVector* devToAtlasOffset);
 
diff --git a/src/gpu/ccpr/GrCoverageCountingPathRenderer.cpp b/src/gpu/ccpr/GrCoverageCountingPathRenderer.cpp
index ec08d69..92add62 100644
--- a/src/gpu/ccpr/GrCoverageCountingPathRenderer.cpp
+++ b/src/gpu/ccpr/GrCoverageCountingPathRenderer.cpp
@@ -87,6 +87,62 @@
                                                            mustCheckBounds));
 }
 
+namespace {
+
+// Iterates all GrCCClipPaths in an array of non-empty maps.
+class ClipMapsIter {
+public:
+    ClipMapsIter(sk_sp<GrCCPerOpsTaskPaths>* mapsList) : fMapsList(mapsList) {}
+
+    bool operator!=(const ClipMapsIter& that) {
+        if (fMapsList != that.fMapsList) {
+            return true;
+        }
+        // fPerOpsTaskClipPaths will be null when we are on the first element.
+        if (fPerOpsTaskClipPaths != that.fPerOpsTaskClipPaths) {
+            return true;
+        }
+        return fPerOpsTaskClipPaths && fClipPathsIter != that.fClipPathsIter;
+    }
+
+    void operator++() {
+        // fPerOpsTaskClipPaths is null when we are on the first element.
+        if (!fPerOpsTaskClipPaths) {
+            fPerOpsTaskClipPaths = &(*fMapsList)->fClipPaths;
+            SkASSERT(!fPerOpsTaskClipPaths->empty());  // We don't handle empty lists.
+            fClipPathsIter = fPerOpsTaskClipPaths->begin();
+        }
+        if ((++fClipPathsIter) == fPerOpsTaskClipPaths->end()) {
+            ++fMapsList;
+            fPerOpsTaskClipPaths = nullptr;
+        }
+    }
+
+    GrCCClipPath* operator->() {
+        // fPerOpsTaskClipPaths is null when we are on the first element.
+        const auto& it = (!fPerOpsTaskClipPaths) ? (*fMapsList)->fClipPaths.begin()
+                                                 : fClipPathsIter;
+        return &(it->second);
+    }
+
+private:
+    sk_sp<GrCCPerOpsTaskPaths>* fMapsList;
+    std::map<uint32_t, GrCCClipPath>* fPerOpsTaskClipPaths = nullptr;
+    std::map<uint32_t, GrCCClipPath>::iterator fClipPathsIter;
+};
+
+}  // namespace
+
+static void assign_atlas_textures(GrTexture* atlasTexture, ClipMapsIter nextPathToAssign,
+                                  const ClipMapsIter& end) {
+    if (!atlasTexture) {
+        return;
+    }
+    for (; nextPathToAssign != end; ++nextPathToAssign) {
+        nextPathToAssign->assignAtlasTexture(sk_ref_sp(atlasTexture));
+    }
+}
+
 void GrCoverageCountingPathRenderer::preFlush(
         GrOnFlushResourceProvider* onFlushRP, SkSpan<const uint32_t> taskIDs) {
     SkASSERT(!fFlushing);
@@ -122,14 +178,21 @@
     fPerFlushResources = std::make_unique<GrCCPerFlushResources>(onFlushRP, specs);
 
     // Layout the atlas(es) and render paths.
-    for (const auto& flushingPaths : fFlushingPaths) {
-        for (auto& clipsIter : flushingPaths->fClipPaths) {
-            clipsIter.second.renderPathInAtlas(fPerFlushResources.get(), onFlushRP);
+    ClipMapsIter it(fFlushingPaths.begin());
+    ClipMapsIter end(fFlushingPaths.end());
+    ClipMapsIter nextPathToAssign = it;  // The next GrCCClipPath to call assignAtlasTexture on.
+    for (; it != end; ++it) {
+        if (const GrCCAtlas* retiredAtlas =
+                it->renderPathInAtlas(fPerFlushResources.get(), onFlushRP)) {
+            assign_atlas_textures(retiredAtlas->textureProxy()->peekTexture(), nextPathToAssign,
+                                  it);
+            nextPathToAssign = it;
         }
     }
 
     // Allocate resources and then render the atlas(es).
-    fPerFlushResources->finalize(onFlushRP);
+    const GrCCAtlas* atlas = fPerFlushResources->finalize(onFlushRP);
+    assign_atlas_textures(atlas->textureProxy()->peekTexture(), nextPathToAssign, end);
 }
 
 void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken,