Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "SkTypes.h" |
| 9 | #include "Test.h" |
| 10 | |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 11 | #include "GrContext.h" |
| 12 | #include "GrContextPriv.h" |
| 13 | #include "GrClip.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 14 | #include "GrDrawingManager.h" |
| 15 | #include "GrPathRenderer.h" |
| 16 | #include "GrPaint.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 17 | #include "GrRenderTargetContext.h" |
| 18 | #include "GrRenderTargetContextPriv.h" |
| 19 | #include "GrShape.h" |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 20 | #include "GrTexture.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 21 | #include "SkMatrix.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 22 | #include "SkPathPriv.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 23 | #include "SkRect.h" |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 24 | #include "sk_tool_utils.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 25 | #include "ccpr/GrCoverageCountingPathRenderer.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 26 | #include "mock/GrMockTypes.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 27 | #include <cmath> |
| 28 | |
// Edge length, in pixels, of the square render target every test below draws into.
static constexpr int kCanvasSize = 100;
| 30 | |
// A GrClip that feeds its path through CCPR's clip processor, letting tests
// exercise the coverage-counting path renderer's clip pipeline directly.
class CCPRClip : public GrClip {
public:
    // 'ccpr' is not owned; 'path' is copied (SkPath is a cheap, shared-data copy).
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    // Installs a CCPR clip FP whose clip bounds cover the entire render target.
    bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
               SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->contextPriv().caps()));
        return true;
    }
    // Deliberately never a trivial clip: force every draw through apply() above.
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    // Conservative bounds are the whole (width x height) device; not an
    // "intersection of rects" clip.
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;  // borrowed; outlives this clip in the tests
    const SkPath fPath;                           // the clip path
};
| 55 | |
// Test harness that draws and clips paths straight through CCPR (bypassing the
// normal path-renderer chain) into a kCanvasSize x kCanvasSize render target.
class CCPRPathDrawer {
public:
    // Reports (rather than asserts) when CCPR or the RTC cannot be created, so
    // callers can bail out gracefully via valid().
    CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                      SkBackingFit::kExact, kCanvasSize,
                      kCanvasSize, kRGBA_8888_GrPixelConfig,
                      nullptr)) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx; }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    // Fullscreen clear; tests use this to cull any pending CCPR ops.
    void clear() const { fRTC->clear(nullptr, 0, GrRenderTargetContext::CanClearFullscreen::kYes); }
    // Drops all references without flushing, simulating context abandonment.
    void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }

    // Draws 'path' via CCPR's testing-only back door, skipping canDrawPath()
    // checks (so e.g. empty paths still reach CCPR).
    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(GrColor4f(0, 1, 0, 1));  // opaque green

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape(path);

        fCCPR->testingOnly_drawPathDirectly({
                fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    // Draws a fullscreen rect clipped by 'clipPath' through CCPRClip, so the
    // path goes down CCPR's clip-processor code path.
    void clipFullscreenRect(SkPath clipPath, GrColor4f color = GrColor4f(0, 1, 0, 1)) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    GrContext* fCtx;                       // borrowed; nulled by abandonGrContext()
    GrCoverageCountingPathRenderer* fCCPR; // borrowed from the drawing manager
    sk_sp<GrRenderTargetContext> fRTC;
};
| 116 | |
// Base class for CCPR unit tests that run against a mock GrContext configured
// so the coverage-counting path renderer is the only path renderer available.
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter) {
        // Mock caps required by CCPR: instanced attribs, mappable buffers,
        // renderable+texturable A8/F16 alpha configs, geometry shaders,
        // integer support, and flat interpolation.
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        // Subclass hook runs last so it can override any default above.
        this->customizeOptions(&mockOptions, &ctxOptions);

        fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!fMockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!fMockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(fMockContext.get(), reporter);
        if (!ccpr.valid()) {
            return;
        }

        // A simple default path (single cubic) shared by subclasses.
        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    // Lets subclasses adjust mock/context options before the context is made.
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    sk_sp<GrContext> fMockContext;
    SkPath fPath;
};
| 168 | |
// Registers a CCPRTest subclass as a GPU unit test: constructs an instance and
// invokes run() with the test reporter. (Comments must stay outside the macro
// body — a '//' on a continuation line would swallow the trailing backslash.)
#define DEF_CCPR_TEST(name)                      \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test;                               \
        test.run(reporter);                      \
    }
| 174 | |
// Verifies CCPR releases its SkPath references at the right times: after a
// flush, and when the context is deleted without flushing. Uniqueness of
// fPath's backing ref is the observable signal.
class GrCCPRTest_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        ccpr.abandonGrContext();
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        fMockContext.reset();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanup)
| 207 | |
// Same cleanup expectations as GrCCPRTest_cleanup, but with every texture
// allocation failing — path refs must still be released on the failure paths.
class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)
| 214 | |
// Verifies that CCPR draw ops unregister themselves from CCPR when they are
// culled early (e.g. by a fullscreen clear), both before a flush and before
// context teardown — a stale registration would crash later.
class GrCCPRTest_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.abandonGrContext();
        fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)
| 236 | |
// Feeds CCPR paths that become empty after cropping (and a literally empty
// path) through both the draw and clip pipelines; passing criterion is simply
// that the flushes don't assert or crash.
class GrCCPRTest_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath)
| 272 | |
// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class GrCCPRTest_cache : public CCPRTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        // Path mask caching is off by default in CCPRTest; this test needs it on.
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        static constexpr int kPathSize = 20;
        SkRandom rand;

        // Build star paths complex enough that GrShape keys them by a listed
        // key rather than raw verb data (verb count > kMaxKeyFromDataVerbCnt).
        SkPath paths[300];
        int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
        for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
                                                primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
        }

        // Even-indexed paths use matrices[0], odd-indexed use matrices[1]; the
        // test perturbs each matrix independently below.
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };

        int firstAtlasID = -1;  // mock backend ID of the first stashed atlas

        for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
            static constexpr int kNumHitsBeforeStash = 2;
            static const GrUniqueKey gInvalidUniqueKey;

            // Draw all the paths then flush. Repeat until a new stash occurs.
            const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
            for (int j = 0; j < kNumHitsBeforeStash; ++j) {
                // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
                REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
                    ccpr.drawPath(paths[i], matrices[i % 2]);
                }
                ccpr.flush();

                stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
            }

            // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
            GrMockTextureInfo stashedAtlasInfo;
            stashedAtlasInfo.fID = -1;  // sentinel: "no stashed atlas"
            if (stashedAtlasKey->isValid()) {
                GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
                sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
                REPORTER_ASSERT(reporter, stashedAtlas);
                if (stashedAtlas) {
                    const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
                    backendTexture.getMockTextureInfo(&stashedAtlasInfo);
                }
            }

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
                firstAtlasID = stashedAtlasInfo.fID;
                continue;
            }

            // Iterations cycle through three states: full cache hits (1),
            // even masks re-rendered (2), odd masks re-rendered (0).
            switch (iterIdx % 3) {
                case 1:
                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, no atlas should
                    // have been stashed away.
                    REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                    // Invalidate even path masks.
                    matrices[0].preTranslate(1.6f, 1.4f);
                    break;

                case 2:
                    // Even path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Invalidate odd path masks.
                    matrices[1].preTranslate(-1.4f, -1.6f);
                    break;

                case 0:
                    // Odd path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' is the same texture that got stashed away last time (assuming
                    // no assertion failures). So if it also got stashed this time, it means we
                    // first copied the even paths out of it, then recycled the exact same texture
                    // to render the odd paths. This is the expected behavior.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Integer translates: all path masks stay valid.
                    matrices[0].preTranslate(-1, -1);
                    matrices[1].preTranslate(1, 1);
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(GrCCPRTest_cache)
| 382 | |
// Base class for CCPR tests that run against real rendering contexts (as
// opposed to the mock context used by CCPRTest). Silently skips when CCPR is
// not enabled on the GPU under test.
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx) const {
        if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(ctx, reporter);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};
| 401 | |
// Registers a CCPRRenderingTest subclass to run on every rendering-capable
// GPU context configuration.
#define DEF_CCPR_RENDERING_TEST(name)                        \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test;                                           \
        test.run(reporter, ctxInfo.grContext());             \
    }
| 407 | |
| 408 | class GrCCPRTest_busyPath : public CCPRRenderingTest { |
| 409 | void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override { |
| 410 | static constexpr int kNumBusyVerbs = 1 << 17; |
| 411 | ccpr.clear(); |
| 412 | SkPath busyPath; |
| 413 | busyPath.moveTo(0, 0); // top left |
| 414 | busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right |
| 415 | for (int i = 2; i < kNumBusyVerbs; ++i) { |
| 416 | float offset = i * ((float)kCanvasSize / kNumBusyVerbs); |
| 417 | busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen |
| 418 | } |
| 419 | ccpr.drawPath(busyPath); |
| 420 | |
| 421 | ccpr.flush(); // If this doesn't crash, the test passed. |
| 422 | // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in |
| 423 | // your platform's GrGLCaps. |
| 424 | } |
| 425 | }; |
| 426 | DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath) |