ccpr: Implement path mask caching
Implement caching as follows (a rough sketch of this flow appears after the list):
1) Instead of deleting the mainline ccpr atlas when finished, stash it
away from flush to flush.
2) On subsequent flushes, check the stashed atlas to see if we can
reuse any of its cacheable paths. Copy reusable paths into 8-bit
literal coverage atlases and store them in the resource cache.
3) Recycle the stashed atlas texture for the remaining paths in the
flush.
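
For illustration, a minimal self-contained C++ sketch of this flush-to-flush
flow follows. Every name in it (PathMaskCache, Atlas, CachedPathMask,
copyToLiteralCoverage, renderPaths) is a hypothetical placeholder, not one of
the GrCC* classes touched by this change, and the CPU-side maps stand in for
the GPU atlas textures and resource cache the real code uses.

  // Sketch only: models the three steps above with placeholder types.
  #include <cstdint>
  #include <memory>
  #include <string>
  #include <unordered_map>
  #include <unordered_set>
  #include <vector>

  struct Atlas {
      // Stand-in for a GPU atlas texture; the real code holds a texture proxy.
  };

  struct CachedPathMask {
      // Stand-in for an 8-bit literal coverage mask copied out of the stashed atlas.
      std::vector<uint8_t> coverage;
  };

  class PathMaskCache {
  public:
      void flush(const std::vector<std::string>& pathKeys) {
          std::vector<std::string> remaining;
          for (const std::string& key : pathKeys) {
              if (fCache.count(key)) {
                  continue;  // Mask already lives in the persistent cache.
              }
              if (fStashedAtlas && fPrevFlushKeys.count(key)) {
                  // Step 2: the previous flush drew this path into the stashed
                  // atlas; copy it into an 8-bit coverage mask and cache it.
                  fCache[key] = this->copyToLiteralCoverage(key);
              } else {
                  remaining.push_back(key);
              }
          }

          // Step 3: recycle the stashed atlas texture for the remaining paths.
          std::unique_ptr<Atlas> atlas =
                  fStashedAtlas ? std::move(fStashedAtlas) : std::make_unique<Atlas>();
          this->renderPaths(atlas.get(), remaining);

          // Step 1: instead of deleting the atlas when finished, stash it for
          // the next flush.
          fStashedAtlas = std::move(atlas);
          fPrevFlushKeys = std::unordered_set<std::string>(remaining.begin(),
                                                           remaining.end());
      }

  private:
      CachedPathMask copyToLiteralCoverage(const std::string&) const { return {}; }
      void renderPaths(Atlas*, const std::vector<std::string>&) {}

      std::unique_ptr<Atlas> fStashedAtlas;
      std::unordered_set<std::string> fPrevFlushKeys;
      std::unordered_map<std::string, CachedPathMask> fCache;
  };

  int main() {
      PathMaskCache cache;
      cache.flush({"pathA", "pathB"});  // First flush: both paths rendered into the atlas.
      cache.flush({"pathA", "pathC"});  // pathA copied out of the stashed atlas; pathC rendered.
  }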
Bug: skia:
Change-Id: I9b20fbea708646df1df3a5f9c044e2299706b989
Reviewed-on: https://skia-review.googlesource.com/134703
Commit-Queue: Chris Dalton <csmartdalton@google.com>
Reviewed-by: Robert Phillips <robertphillips@google.com>
diff --git a/src/gpu/ccpr/GrCCClipPath.cpp b/src/gpu/ccpr/GrCCClipPath.cpp
index 61d58d4..8629cc2 100644
--- a/src/gpu/ccpr/GrCCClipPath.cpp
+++ b/src/gpu/ccpr/GrCCClipPath.cpp
@@ -48,15 +48,15 @@
fAccessRect = accessRect;
}
-void GrCCClipPath::accountForOwnPath(GrCCPerFlushResourceSpecs* resourceSpecs) const {
+void GrCCClipPath::accountForOwnPath(GrCCPerFlushResourceSpecs* specs) const {
SkASSERT(this->isInitialized());
- ++resourceSpecs->fNumClipPaths;
- resourceSpecs->fParsingPathStats.statPath(fDeviceSpacePath);
+ ++specs->fNumClipPaths;
+ specs->fRenderedPathStats.statPath(fDeviceSpacePath);
SkIRect ibounds;
if (ibounds.intersect(fAccessRect, fPathDevIBounds)) {
- resourceSpecs->fAtlasSpecs.accountForSpace(ibounds.width(), ibounds.height());
+ specs->fRenderedAtlasSpecs.accountForSpace(ibounds.width(), ibounds.height());
}
}