blob: 092951fff795b897249ef1dff79a39258e31a2f7 [file] [log] [blame]
/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "gm/gm.h"
9
10#include "include/core/SkPath.h"
11#include "include/gpu/GrContext.h"
12#include "include/gpu/GrContextOptions.h"
13#include "src/gpu/GrContextPriv.h"
14#include "src/gpu/GrDrawingManager.h"
15#include "src/gpu/GrRenderTargetContext.h"
16#include "src/gpu/ccpr/GrCCPathCache.h"
17#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
18#include "tools/ToolUtils.h"
19
20namespace skiagm {
21
// On failure, formats the failed condition (with this file's line number) into `errorMsg` and
// returns DrawResult::kFail from the calling function. Must be expanded in a scope where
// `errorMsg` (SkString*) is available and the enclosing function returns DrawResult.
#define ERR_MSG_ASSERT(COND) \
        do { \
            if (!(COND)) { \
                errorMsg->printf("preservefillrule.cpp(%i): assert(%s)", \
                                 __LINE__, #COND); \
                return DrawResult::kFail; \
            } \
        } while (false)
30
31
32/**
33 * This test ensures that the ccpr path cache preserves fill rules properly, both in the case where
34 * we copy paths into a8 literal coverage atlases, as well as in the case where we just reuse a
35 * stashed fp16 coverage count atlas.
36 */
37class PreserveFillRuleGM : public GpuGM {
38public:
39 // fStarSize affects whether ccpr copies the paths to an a8 literal coverage atlas, or just
40 // leaves them stashed in an fp16 coverage count atlas. The threshold for copying to a8 is
41 // currently 256x256 total pixels copied. If this ever changes, there is code in onDraw that
42 // will detect the unexpected behavior and draw a failure message.
43 PreserveFillRuleGM(bool literalCoverageAtlas)
44 : fLiteralCoverageAtlas(literalCoverageAtlas)
45 , fStarSize((fLiteralCoverageAtlas) ? 200 : 20) {
46 }
47
48private:
49 SkString onShortName() override {
50 SkString name("preservefillrule");
51 name += (fLiteralCoverageAtlas) ? "_big" : "_little";
52 return name;
53 }
54 SkISize onISize() override { return SkISize::Make(fStarSize * 2, fStarSize * 2); }
55
56 void modifyGrContextOptions(GrContextOptions* ctxOptions) override {
57 ctxOptions->fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;
58 ctxOptions->fAllowPathMaskCaching = true;
59 }
60
61 DrawResult onDraw(GrContext* ctx, GrRenderTargetContext* rtc, SkCanvas* canvas,
62 SkString* errorMsg) override {
63 using CoverageType = GrCCAtlas::CoverageType;
64
65 auto* ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer();
66 if (!ccpr) {
67 errorMsg->set("ccpr only");
68 return DrawResult::kSkip;
69 }
70
71 auto pathCache = ccpr->testingOnly_getPathCache();
72 if (!pathCache) {
73 errorMsg->set("ccpr is not in caching mode. "
74 "Are you using viewer? Launch with \"--cachePathMasks true\".");
75 return DrawResult::kFail;
76 }
77
78 auto starRect = SkRect::MakeWH(fStarSize, fStarSize);
79 SkPath star7_winding = ToolUtils::make_star(starRect, 7);
80 star7_winding.setFillType(SkPath::kWinding_FillType);
81
82 SkPath star7_evenOdd = star7_winding;
83 star7_evenOdd.transform(SkMatrix::MakeTrans(0, fStarSize));
84 star7_evenOdd.setFillType(SkPath::kEvenOdd_FillType);
85
86 SkPath star5_winding = ToolUtils::make_star(starRect, 5);
87 star5_winding.transform(SkMatrix::MakeTrans(fStarSize, 0));
88 star5_winding.setFillType(SkPath::kWinding_FillType);
89
90 SkPath star5_evenOdd = star5_winding;
91 star5_evenOdd.transform(SkMatrix::MakeTrans(0, fStarSize));
92 star5_evenOdd.setFillType(SkPath::kEvenOdd_FillType);
93
94 SkPaint paint;
95 paint.setColor(SK_ColorGREEN);
96 paint.setAntiAlias(true);
97
98 for (int i = 0; i < 3; ++i) {
99 canvas->clear(SK_ColorWHITE);
100 canvas->drawPath(star7_winding, paint);
101 canvas->drawPath(star7_evenOdd, paint);
102 canvas->drawPath(star5_winding, paint);
103 canvas->drawPath(star5_evenOdd, paint);
104 rtc->flush(SkSurface::BackendSurfaceAccess::kNoAccess, GrFlushInfo());
105
106 // Ensure the path cache is behaving in such a way that we are actually testing what we
107 // think we are.
108 int numCachedPaths = 0;
109 for (GrCCPathCacheEntry* entry : pathCache->testingOnly_getLRU()) {
110 if (0 == i) {
111 // We don't cache an atlas on the first hit.
112 ERR_MSG_ASSERT(!entry->cachedAtlas());
113 } else {
114 // The stars should be cached in an atlas now.
115 ERR_MSG_ASSERT(entry->cachedAtlas());
116
117 CoverageType atlasCoverageType = entry->cachedAtlas()->coverageType();
118 if (i < 2) {
119 // We never copy to an a8 atlas before the second hit.
120 ERR_MSG_ASSERT(CoverageType::kFP16_CoverageCount == atlasCoverageType);
121 } else if (fLiteralCoverageAtlas) {
122 // Verify fStarSize is large enough that the paths got copied to an a8
123 // atlas.
124 ERR_MSG_ASSERT(CoverageType::kA8_LiteralCoverage == atlasCoverageType);
125 } else {
126 // Verify fStarSize is small enough that the paths did *NOT* get copied to
127 // an a8 atlas.
128 ERR_MSG_ASSERT(CoverageType::kFP16_CoverageCount == atlasCoverageType);
129 }
130 }
131 ++numCachedPaths;
132 }
133 // Verify all 4 paths are tracked by the path cache.
134 ERR_MSG_ASSERT(4 == numCachedPaths);
135 }
136
137 return DrawResult::kOk;
138 }
139
140private:
141 const bool fLiteralCoverageAtlas;
142 const int fStarSize;
143};
144
// Register both flavors: big stars exercise the a8 literal-coverage copy path; little stars
// stay in the fp16 coverage count atlas.
DEF_GM( return new PreserveFillRuleGM(true); )
DEF_GM( return new PreserveFillRuleGM(false); )
147
148}