Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2017 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "SkTypes.h" |
| 9 | #include "Test.h" |
| 10 | |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 11 | #include "GrContext.h" |
| 12 | #include "GrContextPriv.h" |
| 13 | #include "GrClip.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 14 | #include "GrDrawingManager.h" |
| 15 | #include "GrPathRenderer.h" |
| 16 | #include "GrPaint.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 17 | #include "GrRenderTargetContext.h" |
| 18 | #include "GrRenderTargetContextPriv.h" |
| 19 | #include "GrShape.h" |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 20 | #include "GrTexture.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 21 | #include "SkMatrix.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 22 | #include "SkPathPriv.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 23 | #include "SkRect.h" |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 24 | #include "sk_tool_utils.h" |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 25 | #include "ccpr/GrCoverageCountingPathRenderer.h" |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 26 | #include "mock/GrMockTypes.h" |
Hal Canary | 8a00144 | 2018-09-19 11:31:27 -0400 | [diff] [blame] | 27 | |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 28 | #include <cmath> |
| 29 | |
// Width and height (in pixels) of the square render target used by every CCPR test below.
static constexpr int kCanvasSize = 100;
| 31 | |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 32 | class CCPRClip : public GrClip { |
| 33 | public: |
| 34 | CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {} |
| 35 | |
| 36 | private: |
Robert Phillips | 777707b | 2018-01-17 11:40:14 -0500 | [diff] [blame] | 37 | bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out, |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 38 | SkRect* bounds) const override { |
Chris Dalton | 4c458b1 | 2018-06-16 17:22:59 -0600 | [diff] [blame] | 39 | out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath, |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 40 | SkIRect::MakeWH(rtc->width(), rtc->height()), |
Chris Dalton | 4c458b1 | 2018-06-16 17:22:59 -0600 | [diff] [blame] | 41 | rtc->width(), rtc->height(), |
| 42 | *context->contextPriv().caps())); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 43 | return true; |
| 44 | } |
| 45 | bool quickContains(const SkRect&) const final { return false; } |
| 46 | bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; } |
| 47 | void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final { |
| 48 | rect->set(0, 0, width, height); |
| 49 | if (iior) { |
| 50 | *iior = false; |
| 51 | } |
| 52 | } |
| 53 | GrCoverageCountingPathRenderer* const fCCPR; |
| 54 | const SkPath fPath; |
| 55 | }; |
| 56 | |
// Drives CCPR directly — bypassing GrDrawingManager's normal path-renderer chain — into a
// deferred offscreen kCanvasSize x kCanvasSize render target context.
class CCPRPathDrawer {
public:
    CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter, bool doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                                                         SkBackingFit::kExact, kCanvasSize,
                                                         kCanvasSize, kRGBA_8888_GrPixelConfig,
                                                         nullptr))
            , fDoStroke(doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx; }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    // True only if both the CCPR instance and the render target context were created.
    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, 0, GrRenderTargetContext::CanClearFullscreen::kYes); }
    // Drops all GPU references. Call before deleting the GrContext without flushing.
    void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }

    // Draws 'path' (filled, or as a hairline stroke when fDoStroke is set) via CCPR's
    // testing-only entry point, so the draw cannot be routed to a different path renderer.
    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(GrColor4f(0, 1, 0, 1));

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape;
        if (!fDoStroke) {
            shape = GrShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    // Draws a full-canvas rect clipped by 'clipPath', exercising CCPR's clip-processor path.
    void clipFullscreenRect(SkPath clipPath, GrColor4f color = GrColor4f(0, 1, 0, 1)) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    GrContext* fCtx;                        // not owned; null after abandonGrContext()
    GrCoverageCountingPathRenderer* fCCPR;  // owned by the drawing manager
    sk_sp<GrRenderTargetContext> fRTC;
    const bool fDoStroke;                   // draw hairline strokes instead of fills
};
| 129 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 130 | class CCPRTest { |
| 131 | public: |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 132 | void run(skiatest::Reporter* reporter, bool doStroke) { |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 133 | GrMockOptions mockOptions; |
| 134 | mockOptions.fInstanceAttribSupport = true; |
| 135 | mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag; |
Brian Salomon | bdecacf | 2018-02-02 20:32:49 -0500 | [diff] [blame] | 136 | mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability = |
| 137 | GrMockOptions::ConfigOptions::Renderability::kNonMSAA; |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 138 | mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true; |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 139 | mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability = |
| 140 | GrMockOptions::ConfigOptions::Renderability::kNonMSAA; |
| 141 | mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true; |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 142 | mockOptions.fGeometryShaderSupport = true; |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 143 | mockOptions.fIntegerSupport = true; |
| 144 | mockOptions.fFlatInterpolationSupport = true; |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 145 | |
| 146 | GrContextOptions ctxOptions; |
| 147 | ctxOptions.fAllowPathMaskCaching = false; |
| 148 | ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting; |
| 149 | |
Chris Dalton | a2b5b64 | 2018-06-24 13:08:57 -0600 | [diff] [blame] | 150 | this->customizeOptions(&mockOptions, &ctxOptions); |
| 151 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 152 | fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions); |
| 153 | if (!fMockContext) { |
| 154 | ERRORF(reporter, "could not create mock context"); |
| 155 | return; |
| 156 | } |
| 157 | if (!fMockContext->unique()) { |
| 158 | ERRORF(reporter, "mock context is not unique"); |
| 159 | return; |
| 160 | } |
| 161 | |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 162 | CCPRPathDrawer ccpr(fMockContext.get(), reporter, doStroke); |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 163 | if (!ccpr.valid()) { |
| 164 | return; |
| 165 | } |
| 166 | |
| 167 | fPath.moveTo(0, 0); |
| 168 | fPath.cubicTo(50, 50, 0, 50, 50, 0); |
| 169 | this->onRun(reporter, ccpr); |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 170 | } |
| 171 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 172 | virtual ~CCPRTest() {} |
| 173 | |
| 174 | protected: |
Chris Dalton | a2b5b64 | 2018-06-24 13:08:57 -0600 | [diff] [blame] | 175 | virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {} |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 176 | virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0; |
| 177 | |
Chris Dalton | 4da7019 | 2018-06-18 09:51:36 -0600 | [diff] [blame] | 178 | sk_sp<GrContext> fMockContext; |
| 179 | SkPath fPath; |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 180 | }; |
| 181 | |
// Defines a GPU unit test that instantiates 'name' once and runs it twice: first with filled
// paths (doStroke=false), then with stroked paths (doStroke=true).
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, false); \
        test.run(reporter, true); \
    }
| 188 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 189 | class GrCCPRTest_cleanup : public CCPRTest { |
| 190 | void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override { |
| 191 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
Chris Dalton | cc604e5 | 2017-10-06 16:27:32 -0600 | [diff] [blame] | 192 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 193 | // Ensure paths get unreffed. |
| 194 | for (int i = 0; i < 10; ++i) { |
| 195 | ccpr.drawPath(fPath); |
Chris Dalton | 4bfb50b | 2018-05-21 09:10:53 -0600 | [diff] [blame] | 196 | } |
| 197 | REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath)); |
| 198 | ccpr.flush(); |
| 199 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 200 | |
| 201 | // Ensure clip paths get unreffed. |
| 202 | for (int i = 0; i < 10; ++i) { |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 203 | ccpr.clipFullscreenRect(fPath); |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 204 | } |
| 205 | REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath)); |
| 206 | ccpr.flush(); |
| 207 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 208 | |
| 209 | // Ensure paths get unreffed when we delete the context without flushing. |
| 210 | for (int i = 0; i < 10; ++i) { |
| 211 | ccpr.drawPath(fPath); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 212 | ccpr.clipFullscreenRect(fPath); |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 213 | } |
| 214 | ccpr.abandonGrContext(); |
| 215 | REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath)); |
| 216 | fMockContext.reset(); |
| 217 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 218 | } |
| 219 | }; |
| 220 | DEF_CCPR_TEST(GrCCPRTest_cleanup) |
| 221 | |
// Same as GrCCPRTest_cleanup, but with every texture allocation failing, to verify that paths
// still get unreffed even when CCPR can never obtain an atlas texture.
class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)
| 228 | |
// Ops that get culled before flush (e.g. by a fullscreen clear) must unregister themselves from
// CCPR; otherwise the later flush or context teardown would reference already-deleted Ops.
class GrCCPRTest_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.abandonGrContext();
        fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)
| 250 | |
Chris Dalton | c9c97b7 | 2017-11-27 15:34:26 -0700 | [diff] [blame] | 251 | class GrCCPRTest_parseEmptyPath : public CCPRTest { |
| 252 | void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override { |
| 253 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 254 | |
| 255 | // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with |
| 256 | // an empty path. |
| 257 | SkPath largeOutsidePath; |
| 258 | largeOutsidePath.moveTo(-1e30f, -1e30f); |
| 259 | largeOutsidePath.lineTo(-1e30f, +1e30f); |
| 260 | largeOutsidePath.lineTo(-1e10f, +1e30f); |
| 261 | ccpr.drawPath(largeOutsidePath); |
| 262 | |
| 263 | // Normally an empty path is culled before reaching ccpr, however we use a back door for |
| 264 | // testing so this path will make it. |
| 265 | SkPath emptyPath; |
| 266 | SkASSERT(emptyPath.isEmpty()); |
| 267 | ccpr.drawPath(emptyPath); |
| 268 | |
| 269 | // This is the test. It will exercise various internal asserts and verify we do not crash. |
| 270 | ccpr.flush(); |
Chris Dalton | a32a3c3 | 2017-12-05 10:05:21 -0700 | [diff] [blame] | 271 | |
| 272 | // Now try again with clips. |
| 273 | ccpr.clipFullscreenRect(largeOutsidePath); |
| 274 | ccpr.clipFullscreenRect(emptyPath); |
| 275 | ccpr.flush(); |
| 276 | |
| 277 | // ... and both. |
| 278 | ccpr.drawPath(largeOutsidePath); |
| 279 | ccpr.clipFullscreenRect(largeOutsidePath); |
| 280 | ccpr.drawPath(emptyPath); |
| 281 | ccpr.clipFullscreenRect(emptyPath); |
| 282 | ccpr.flush(); |
Chris Dalton | c9c97b7 | 2017-11-27 15:34:26 -0700 | [diff] [blame] | 283 | } |
| 284 | }; |
| 285 | DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath) |
| 286 | |
// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class GrCCPRTest_cache : public CCPRTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        // Path mask caching is the subject of this test; CCPRTest disables it by default.
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        static constexpr int kPathSize = 20;
        SkRandom rand;

        // Random star polygons with enough verbs that GrShape won't key them by their raw data,
        // forcing the path-mask cache to be consulted.
        SkPath paths[300];
        int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
        for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
                                                primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
        }

        // Even-indexed paths draw with matrices[0]; odd-indexed with matrices[1].
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };

        int firstAtlasID = -1;

        for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
            static constexpr int kNumHitsBeforeStash = 2;
            static const GrUniqueKey gInvalidUniqueKey;

            // Draw all the paths then flush. Repeat until a new stash occurs.
            const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
            for (int j = 0; j < kNumHitsBeforeStash; ++j) {
                // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
                REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
                    ccpr.drawPath(paths[i], matrices[i % 2]);
                }
                ccpr.flush();

                stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
            }

            // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
            GrMockTextureInfo stashedAtlasInfo;
            stashedAtlasInfo.fID = -1;  // sentinel: "no atlas was stashed"
            if (stashedAtlasKey->isValid()) {
                GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
                sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
                REPORTER_ASSERT(reporter, stashedAtlas);
                if (stashedAtlas) {
                    const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
                    backendTexture.getMockTextureInfo(&stashedAtlasInfo);
                }
            }

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
                firstAtlasID = stashedAtlasInfo.fID;
                continue;
            }

            // Iterations cycle through three states: 1) all masks valid, 2) even masks
            // invalidated by a subpixel translate, 0) odd masks invalidated likewise.
            switch (iterIdx % 3) {
                case 1:
                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, no atlas should
                    // have been stashed away.
                    REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                    // Invalidate even path masks.
                    matrices[0].preTranslate(1.6f, 1.4f);
                    break;

                case 2:
                    // Even path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Invalidate odd path masks.
                    matrices[1].preTranslate(-1.4f, -1.6f);
                    break;

                case 0:
                    // Odd path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' is the same texture that got stashed away last time (assuming
                    // no assertion failures). So if it also got stashed this time, it means we
                    // first copied the even paths out of it, then recycled the exact same texture
                    // to render the odd paths. This is the expected behavior.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Integer translates: all path masks stay valid.
                    matrices[0].preTranslate(-1, -1);
                    matrices[1].preTranslate(1, 1);
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(GrCCPRTest_cache)
| 396 | |
Chris Dalton | dedf8f2 | 2018-09-24 20:23:47 -0600 | [diff] [blame] | 397 | class GrCCPRTest_unrefPerOpListPathsBeforeOps : public CCPRTest { |
| 398 | void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override { |
| 399 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 400 | for (int i = 0; i < 10000; ++i) { |
| 401 | // Draw enough paths to make the arena allocator hit the heap. |
| 402 | ccpr.drawPath(fPath); |
| 403 | } |
| 404 | |
| 405 | // Unref the GrCCPerOpListPaths object. |
| 406 | auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths(); |
| 407 | perOpListPathsMap.clear(); |
| 408 | |
| 409 | // Now delete the Op and all its draws. |
| 410 | REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath)); |
| 411 | ccpr.flush(); |
| 412 | REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath)); |
| 413 | } |
| 414 | }; |
| 415 | DEF_CCPR_TEST(GrCCPRTest_unrefPerOpListPathsBeforeOps) |
| 416 | |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 417 | class CCPRRenderingTest { |
| 418 | public: |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 419 | void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const { |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 420 | if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) { |
| 421 | return; // CCPR is not enabled on this GPU. |
| 422 | } |
Chris Dalton | 09a7bb2 | 2018-08-31 19:53:15 +0800 | [diff] [blame] | 423 | CCPRPathDrawer ccpr(ctx, reporter, doStroke); |
Chris Dalton | fddb6c0 | 2017-11-04 15:22:22 -0600 | [diff] [blame] | 424 | if (!ccpr.valid()) { |
| 425 | return; |
| 426 | } |
| 427 | this->onRun(reporter, ccpr); |
| 428 | } |
| 429 | |
| 430 | virtual ~CCPRRenderingTest() {} |
| 431 | |
| 432 | protected: |
| 433 | virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0; |
| 434 | }; |
| 435 | |
// Defines a GPU unit test that runs 'name' against every real rendering context, once with
// filled paths and once with stroked paths.
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), false); \
        test.run(reporter, ctxInfo.grContext(), true); \
    }
| 442 | |
| 443 | class GrCCPRTest_busyPath : public CCPRRenderingTest { |
| 444 | void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override { |
| 445 | static constexpr int kNumBusyVerbs = 1 << 17; |
| 446 | ccpr.clear(); |
| 447 | SkPath busyPath; |
| 448 | busyPath.moveTo(0, 0); // top left |
| 449 | busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right |
| 450 | for (int i = 2; i < kNumBusyVerbs; ++i) { |
| 451 | float offset = i * ((float)kCanvasSize / kNumBusyVerbs); |
| 452 | busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen |
| 453 | } |
| 454 | ccpr.drawPath(busyPath); |
| 455 | |
| 456 | ccpr.flush(); // If this doesn't crash, the test passed. |
| 457 | // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in |
| 458 | // your platform's GrGLCaps. |
| 459 | } |
| 460 | }; |
| 461 | DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath) |