ccpr: Implement path mask caching
Implement caching as follows:
1) Instead of deleting the mainline ccpr atlas when finished, stash it
away from flush to flush.
2) On subsequent flushes, check the stashed atlas to see if we can
reuse any of its cacheable paths. Copy reusable paths into 8-bit
literal coverage atlases and store them in the resource cache.
3) Recycle the stashed atlas texture for the remaining paths in the
flush.
Bug: skia:
Change-Id: I9b20fbea708646df1df3a5f9c044e2299706b989
Reviewed-on: https://skia-review.googlesource.com/134703
Commit-Queue: Chris Dalton <csmartdalton@google.com>
Reviewed-by: Robert Phillips <robertphillips@google.com>
diff --git a/src/gpu/ccpr/GrCCPathProcessor.h b/src/gpu/ccpr/GrCCPathProcessor.h
index 9ea0a16..785dd2c 100644
--- a/src/gpu/ccpr/GrCCPathProcessor.h
+++ b/src/gpu/ccpr/GrCCPathProcessor.h
@@ -13,6 +13,8 @@
#include "SkPath.h"
#include <array>
+class GrCCPathCacheEntry;
+class GrCCPerFlushResources;
class GrOnFlushResourceProvider;
class GrOpFlushState;
class GrPipeline;
@@ -37,15 +39,29 @@
};
static constexpr int kNumInstanceAttribs = 1 + (int)InstanceAttribs::kColor;
+ // Helper to offset the 45-degree bounding box returned by GrCCPathParser::parsePath().
+ static SkRect MakeOffset45(const SkRect& devBounds45, float dx, float dy) {
+ // devBounds45 is in "| 1 -1 | * devCoords" space.
+ // | 1 1 |
+ return devBounds45.makeOffset(dx - dy, dx + dy);
+ }
+
+ enum class DoEvenOddFill : bool {
+ kNo = false,
+ kYes = true
+ };
+
struct Instance {
SkRect fDevBounds; // "right < left" indicates even-odd fill type.
SkRect fDevBounds45; // Bounding box in "| 1 -1 | * devCoords" space.
// | 1 1 |
SkIVector fDevToAtlasOffset; // Translation from device space to location in atlas.
- uint32_t fColor;
+ GrColor fColor;
- void set(SkPath::FillType, const SkRect& devBounds, const SkRect& devBounds45,
- const SkIVector& devToAtlasOffset, uint32_t color);
+ void set(const SkRect& devBounds, const SkRect& devBounds45,
+ const SkIVector& devToAtlasOffset, GrColor, DoEvenOddFill = DoEvenOddFill::kNo);
+ void set(const GrCCPathCacheEntry&, const SkIVector& shift, GrColor,
+ DoEvenOddFill = DoEvenOddFill::kNo);
};
GR_STATIC_ASSERT(4 * 11 == sizeof(Instance));
@@ -75,9 +91,8 @@
void getGLSLProcessorKey(const GrShaderCaps&, GrProcessorKeyBuilder*) const override {}
GrGLSLPrimitiveProcessor* createGLSLInstance(const GrShaderCaps&) const override;
- void drawPaths(GrOpFlushState*, const GrPipeline&, const GrBuffer* indexBuffer,
- const GrBuffer* vertexBuffer, GrBuffer* instanceBuffer, int baseInstance,
- int endInstance, const SkRect& bounds) const;
+ void drawPaths(GrOpFlushState*, const GrPipeline&, const GrCCPerFlushResources&,
+ int baseInstance, int endInstance, const SkRect& bounds) const;
private:
const TextureSampler fAtlasAccess;
@@ -86,14 +101,13 @@
typedef GrGeometryProcessor INHERITED;
};
-inline void GrCCPathProcessor::Instance::set(SkPath::FillType fillType, const SkRect& devBounds,
- const SkRect& devBounds45,
- const SkIVector& devToAtlasOffset, uint32_t color) {
- if (SkPath::kEvenOdd_FillType == fillType) {
+inline void GrCCPathProcessor::Instance::set(const SkRect& devBounds, const SkRect& devBounds45,
+ const SkIVector& devToAtlasOffset, GrColor color,
+ DoEvenOddFill doEvenOddFill) {
+ if (DoEvenOddFill::kYes == doEvenOddFill) {
// "right < left" indicates even-odd fill type.
fDevBounds.setLTRB(devBounds.fRight, devBounds.fTop, devBounds.fLeft, devBounds.fBottom);
} else {
- SkASSERT(SkPath::kWinding_FillType == fillType);
fDevBounds = devBounds;
}
fDevBounds45 = devBounds45;