blob: 26ff7f67d2c5fd4682042797d7bb33fe5900f004 [file] [log] [blame]
/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "gm/gm.h"
9
10#include "include/core/SkPath.h"
11#include "include/gpu/GrContext.h"
12#include "include/gpu/GrContextOptions.h"
13#include "src/gpu/GrContextPriv.h"
14#include "src/gpu/GrDrawingManager.h"
15#include "src/gpu/GrRenderTargetContext.h"
16#include "src/gpu/ccpr/GrCCPathCache.h"
17#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
18#include "tools/ToolUtils.h"
19
20namespace skiagm {
21
22#define ERR_MSG_ASSERT(COND) \
23 do { \
24 if (!(COND)) { \
25 errorMsg->printf("preservefillrule.cpp(%i): assert(%s)", \
26 __LINE__, #COND); \
27 return DrawResult::kFail; \
28 } \
29 } while (false)
30
31
/**
 * This test ensures that the ccpr path cache preserves fill rules properly, both in the case
 * where we copy paths into a8 literal coverage atlases, as well as in the case where we just
 * reuse a stashed fp16 coverage count atlas.
 */
37class PreserveFillRuleGM : public GpuGM {
38public:
39 // fStarSize affects whether ccpr copies the paths to an a8 literal coverage atlas, or just
40 // leaves them stashed in an fp16 coverage count atlas. The threshold for copying to a8 is
41 // currently 256x256 total pixels copied. If this ever changes, there is code in onDraw that
42 // will detect the unexpected behavior and draw a failure message.
43 PreserveFillRuleGM(bool literalCoverageAtlas)
44 : fLiteralCoverageAtlas(literalCoverageAtlas)
45 , fStarSize((fLiteralCoverageAtlas) ? 200 : 20) {
46 }
47
48private:
49 SkString onShortName() override {
50 SkString name("preservefillrule");
51 name += (fLiteralCoverageAtlas) ? "_big" : "_little";
52 return name;
53 }
54 SkISize onISize() override { return SkISize::Make(fStarSize * 2, fStarSize * 2); }
55
56 void modifyGrContextOptions(GrContextOptions* ctxOptions) override {
57 ctxOptions->fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;
58 ctxOptions->fAllowPathMaskCaching = true;
59 }
60
61 DrawResult onDraw(GrContext* ctx, GrRenderTargetContext* rtc, SkCanvas* canvas,
62 SkString* errorMsg) override {
63 using CoverageType = GrCCAtlas::CoverageType;
64
Chris Daltonc3318f02019-07-19 14:20:53 -060065 if (rtc->numSamples() > 1) {
66 errorMsg->set("ccpr is currently only used for coverage AA");
67 return DrawResult::kSkip;
68 }
69
Chris Dalton5b5403e2019-06-05 11:54:39 -060070 auto* ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer();
71 if (!ccpr) {
72 errorMsg->set("ccpr only");
73 return DrawResult::kSkip;
74 }
75
76 auto pathCache = ccpr->testingOnly_getPathCache();
77 if (!pathCache) {
78 errorMsg->set("ccpr is not in caching mode. "
79 "Are you using viewer? Launch with \"--cachePathMasks true\".");
80 return DrawResult::kFail;
81 }
82
83 auto starRect = SkRect::MakeWH(fStarSize, fStarSize);
84 SkPath star7_winding = ToolUtils::make_star(starRect, 7);
Mike Reed7d34dc72019-11-26 12:17:17 -050085 star7_winding.setFillType(SkPathFillType::kWinding);
Chris Dalton5b5403e2019-06-05 11:54:39 -060086
87 SkPath star7_evenOdd = star7_winding;
88 star7_evenOdd.transform(SkMatrix::MakeTrans(0, fStarSize));
Mike Reed7d34dc72019-11-26 12:17:17 -050089 star7_evenOdd.setFillType(SkPathFillType::kEvenOdd);
Chris Dalton5b5403e2019-06-05 11:54:39 -060090
91 SkPath star5_winding = ToolUtils::make_star(starRect, 5);
92 star5_winding.transform(SkMatrix::MakeTrans(fStarSize, 0));
Mike Reed7d34dc72019-11-26 12:17:17 -050093 star5_winding.setFillType(SkPathFillType::kWinding);
Chris Dalton5b5403e2019-06-05 11:54:39 -060094
95 SkPath star5_evenOdd = star5_winding;
96 star5_evenOdd.transform(SkMatrix::MakeTrans(0, fStarSize));
Mike Reed7d34dc72019-11-26 12:17:17 -050097 star5_evenOdd.setFillType(SkPathFillType::kEvenOdd);
Chris Dalton5b5403e2019-06-05 11:54:39 -060098
99 SkPaint paint;
100 paint.setColor(SK_ColorGREEN);
101 paint.setAntiAlias(true);
102
103 for (int i = 0; i < 3; ++i) {
104 canvas->clear(SK_ColorWHITE);
105 canvas->drawPath(star7_winding, paint);
106 canvas->drawPath(star7_evenOdd, paint);
107 canvas->drawPath(star5_winding, paint);
108 canvas->drawPath(star5_evenOdd, paint);
109 rtc->flush(SkSurface::BackendSurfaceAccess::kNoAccess, GrFlushInfo());
110
111 // Ensure the path cache is behaving in such a way that we are actually testing what we
112 // think we are.
113 int numCachedPaths = 0;
114 for (GrCCPathCacheEntry* entry : pathCache->testingOnly_getLRU()) {
115 if (0 == i) {
116 // We don't cache an atlas on the first hit.
117 ERR_MSG_ASSERT(!entry->cachedAtlas());
118 } else {
119 // The stars should be cached in an atlas now.
120 ERR_MSG_ASSERT(entry->cachedAtlas());
121
122 CoverageType atlasCoverageType = entry->cachedAtlas()->coverageType();
123 if (i < 2) {
124 // We never copy to an a8 atlas before the second hit.
Chris Daltonc3318f02019-07-19 14:20:53 -0600125 ERR_MSG_ASSERT(ccpr->coverageType() == atlasCoverageType);
Chris Dalton5b5403e2019-06-05 11:54:39 -0600126 } else if (fLiteralCoverageAtlas) {
127 // Verify fStarSize is large enough that the paths got copied to an a8
128 // atlas.
129 ERR_MSG_ASSERT(CoverageType::kA8_LiteralCoverage == atlasCoverageType);
130 } else {
131 // Verify fStarSize is small enough that the paths did *NOT* get copied to
132 // an a8 atlas.
Chris Daltonc3318f02019-07-19 14:20:53 -0600133 ERR_MSG_ASSERT(ccpr->coverageType() == atlasCoverageType);
Chris Dalton5b5403e2019-06-05 11:54:39 -0600134 }
135 }
136 ++numCachedPaths;
137 }
138 // Verify all 4 paths are tracked by the path cache.
139 ERR_MSG_ASSERT(4 == numCachedPaths);
140 }
141
142 return DrawResult::kOk;
143 }
144
145private:
146 const bool fLiteralCoverageAtlas;
147 const int fStarSize;
148};
149
150DEF_GM( return new PreserveFillRuleGM(true); )
151DEF_GM( return new PreserveFillRuleGM(false); )
152
153}