Chris Dalton | 5b5403e | 2019-06-05 11:54:39 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2019 Google LLC. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "gm/gm.h" |
| 9 | |
| 10 | #include "include/core/SkPath.h" |
| 11 | #include "include/gpu/GrContext.h" |
| 12 | #include "include/gpu/GrContextOptions.h" |
Robert Phillips | b7bfbc2 | 2020-07-01 12:55:01 -0400 | [diff] [blame] | 13 | #include "include/gpu/GrRecordingContext.h" |
Chris Dalton | 5b5403e | 2019-06-05 11:54:39 -0600 | [diff] [blame] | 14 | #include "src/gpu/GrContextPriv.h" |
| 15 | #include "src/gpu/GrDrawingManager.h" |
Robert Phillips | 95c250c | 2020-06-29 15:36:12 -0400 | [diff] [blame] | 16 | #include "src/gpu/GrRecordingContextPriv.h" |
Chris Dalton | 5b5403e | 2019-06-05 11:54:39 -0600 | [diff] [blame] | 17 | #include "src/gpu/GrRenderTargetContext.h" |
| 18 | #include "src/gpu/ccpr/GrCCPathCache.h" |
| 19 | #include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h" |
| 20 | #include "tools/ToolUtils.h" |
| 21 | |
| 22 | namespace skiagm { |
| 23 | |
// Fails the GM when COND is false: writes "preservefillrule.cpp(<line>): assert(<cond>)"
// into the enclosing onDraw()'s `errorMsg` (an SkString*) and returns DrawResult::kFail
// from the enclosing function. __LINE__ expands at the use site, so the message points
// at the failing assertion, not at this definition. The do/while(false) wrapper makes
// the expansion a single statement, so it composes safely with if/else and requires a
// trailing semicolon at the call site.
#define ERR_MSG_ASSERT(COND) \
        do { \
            if (!(COND)) { \
                errorMsg->printf("preservefillrule.cpp(%i): assert(%s)", \
                                 __LINE__, #COND); \
                return DrawResult::kFail; \
            } \
        } while (false)
| 32 | |
| 33 | |
/**
 * This test ensures that the ccpr path cache preserves fill rules properly, both in the case where
 * we copy paths into a8 literal coverage atlases, as well as in the case where we just reuse a
 * stashed fp16 coverage count atlas.
 *
 * It draws the same four stars (7-point and 5-point, each with winding and even-odd fill rules)
 * three times, flushing between draws, and inspects the CCPR path cache after each flush to
 * verify the paths are cached in the expected kind of atlas for that cache-hit count.
 */
class PreserveFillRuleGM : public GpuGM {
public:
    // fStarSize affects whether ccpr copies the paths to an a8 literal coverage atlas, or just
    // leaves them stashed in an fp16 coverage count atlas. The threshold for copying to a8 is
    // currently 256x256 total pixels copied. If this ever changes, there is code in onDraw that
    // will detect the unexpected behavior and draw a failure message.
    PreserveFillRuleGM(bool literalCoverageAtlas)
            : fLiteralCoverageAtlas(literalCoverageAtlas)
            , fStarSize((fLiteralCoverageAtlas) ? 200 : 20) {
    }

private:
    // GM name reflects which caching mode is being exercised ("_big" = a8 copy path,
    // "_little" = stashed fp16 path).
    SkString onShortName() override {
        SkString name("preservefillrule");
        name += (fLiteralCoverageAtlas) ? "_big" : "_little";
        return name;
    }
    // Canvas is a 2x2 grid of fStarSize cells, one star per cell.
    SkISize onISize() override { return SkISize::Make(fStarSize * 2, fStarSize * 2); }

    // Force the coverage-counting path renderer and enable path mask caching, since both
    // are prerequisites for what this GM is testing.
    void modifyGrContextOptions(GrContextOptions* ctxOptions) override {
        ctxOptions->fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;
        ctxOptions->fAllowPathMaskCaching = true;
    }

    DrawResult onDraw(GrRecordingContext* ctx, GrRenderTargetContext* rtc, SkCanvas* canvas,
                      SkString* errorMsg) override {
        using CoverageType = GrCCAtlas::CoverageType;

        // CCPR only handles coverage AA; skip (don't fail) on MSAA targets.
        if (rtc->numSamples() > 1) {
            errorMsg->set("ccpr is currently only used for coverage AA");
            return DrawResult::kSkip;
        }

        auto* ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer();
        if (!ccpr) {
            errorMsg->set("ccpr only");
            return DrawResult::kSkip;
        }

        // The path cache only exists when mask caching is on. modifyGrContextOptions
        // enables it for normal GM runs; viewer needs the flag below, hence the hint.
        auto pathCache = ccpr->testingOnly_getPathCache();
        if (!pathCache) {
            errorMsg->set("ccpr is not in caching mode. "
                          "Are you using viewer? Launch with \"--cachePathMasks true\".");
            return DrawResult::kFail;
        }

        // Build four stars in a 2x2 grid: left column 7-point, right column 5-point;
        // top row winding fill, bottom row even-odd fill. The winding/even-odd pairs share
        // identical geometry (one is a translated copy of the other), so only the fill rule
        // distinguishes them in the cache — exactly what must be preserved.
        auto starRect = SkRect::MakeWH(fStarSize, fStarSize);
        SkPath star7_winding = ToolUtils::make_star(starRect, 7);
        star7_winding.setFillType(SkPathFillType::kWinding);

        SkPath star7_evenOdd = star7_winding;
        star7_evenOdd.transform(SkMatrix::Translate(0, fStarSize));
        star7_evenOdd.setFillType(SkPathFillType::kEvenOdd);

        SkPath star5_winding = ToolUtils::make_star(starRect, 5);
        star5_winding.transform(SkMatrix::Translate(fStarSize, 0));
        star5_winding.setFillType(SkPathFillType::kWinding);

        SkPath star5_evenOdd = star5_winding;
        star5_evenOdd.transform(SkMatrix::Translate(0, fStarSize));
        star5_evenOdd.setFillType(SkPathFillType::kEvenOdd);

        SkPaint paint;
        paint.setColor(SK_ColorGREEN);
        paint.setAntiAlias(true);

        // Draw the same four paths three times. The explicit flush after each pass forces
        // CCPR to process the draws, so iteration i observes the cache state after i prior
        // hits of each path. Statement order here is load-bearing: draws must precede the
        // flush, and the cache must be inspected before the next iteration redraws.
        for (int i = 0; i < 3; ++i) {
            canvas->clear(SK_ColorWHITE);
            canvas->drawPath(star7_winding, paint);
            canvas->drawPath(star7_evenOdd, paint);
            canvas->drawPath(star5_winding, paint);
            canvas->drawPath(star5_evenOdd, paint);
            rtc->flush(SkSurface::BackendSurfaceAccess::kNoAccess, GrFlushInfo(), nullptr);

            // Ensure the path cache is behaving in such a way that we are actually testing what we
            // think we are.
            int numCachedPaths = 0;
            for (GrCCPathCacheEntry* entry : pathCache->testingOnly_getLRU()) {
                if (0 == i) {
                    // We don't cache an atlas on the first hit.
                    ERR_MSG_ASSERT(!entry->cachedAtlas());
                } else {
                    // The stars should be cached in an atlas now.
                    ERR_MSG_ASSERT(entry->cachedAtlas());

                    CoverageType atlasCoverageType = entry->cachedAtlas()->coverageType();
                    if (i < 2) {
                        // We never copy to an a8 atlas before the second hit.
                        ERR_MSG_ASSERT(ccpr->coverageType() == atlasCoverageType);
                    } else if (fLiteralCoverageAtlas) {
                        // Verify fStarSize is large enough that the paths got copied to an a8
                        // atlas.
                        ERR_MSG_ASSERT(CoverageType::kA8_LiteralCoverage == atlasCoverageType);
                    } else {
                        // Verify fStarSize is small enough that the paths did *NOT* get copied to
                        // an a8 atlas.
                        ERR_MSG_ASSERT(ccpr->coverageType() == atlasCoverageType);
                    }
                }
                ++numCachedPaths;
            }
            // Verify all 4 paths are tracked by the path cache. If fill rules were being
            // conflated, the geometrically-identical winding/even-odd pairs would collapse
            // into fewer entries.
            ERR_MSG_ASSERT(4 == numCachedPaths);
        }

        return DrawResult::kOk;
    }

private:
    const bool fLiteralCoverageAtlas;  // true => expect copy to a8 literal coverage atlas
    const int fStarSize;               // 200 (a8 copy case) or 20 (stashed fp16 case)
};
| 151 | |
// Register both variants: big stars (true) exercise the copy-to-a8 literal coverage
// atlas path; little stars (false) exercise the stashed fp16 coverage-count atlas path.
DEF_GM( return new PreserveFillRuleGM(true); )
DEF_GM( return new PreserveFillRuleGM(false); )
| 154 | |
| 155 | } |