/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
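// Tests for GrCoverageCountingPathRenderer (CCPR): op and path lifetime, empty and oversized
// paths, atlas caching, and a stress draw. CCPRTest subclasses run against a mock GrContext;
// CCPRRenderingTest subclasses run against real rendering contexts.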

#include "SkTypes.h"
#include "Test.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrClip.h"
#include "GrDrawingManager.h"
#include "GrPathRenderer.h"
#include "GrPaint.h"
#include "GrRenderTargetContext.h"
#include "GrRenderTargetContextPriv.h"
#include "GrShape.h"
#include "GrTexture.h"
#include "SkMatrix.h"
#include "SkPathPriv.h"
#include "SkRect.h"
#include "sk_tool_utils.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"
#include "mock/GrMockTypes.h"
#include <cmath>

static constexpr int kCanvasSize = 100;

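// Minimal GrClip implementation that routes the clip path into CCPR's clip processor
// (makeClipProcessor) and reports conservative bounds covering the full render target.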
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
               SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->contextPriv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

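// Harness that draws fills (or hairline strokes) directly into CCPR via its testing-only
// entry point, bypassing the normal path renderer selection.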
class CCPRPathDrawer {
public:
    CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter, bool doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                           SkBackingFit::kExact, kCanvasSize,
                           kCanvasSize, kRGBA_8888_GrPixelConfig,
                           nullptr))
            , fDoStroke(doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx; }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, 0, GrRenderTargetContext::CanClearFullscreen::kYes); }
    void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(GrColor4f(0, 1, 0, 1));

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape;
        if (!fDoStroke) {
            shape = GrShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, GrColor4f color = GrColor4f(0, 1, 0, 1)) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    GrContext* fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    sk_sp<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

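// Base class for CCPR unit tests. run() builds a mock GrContext whose caps are configured for
// CCPR (instance attribs, mappable buffers, renderable alpha configs, geometry shaders, integer
// and flat interpolation support), limits path rendering to coverage counting, and calls onRun().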
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, bool doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!fMockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!fMockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(fMockContext.get(), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    sk_sp<GrContext> fMockContext;
    SkPath fPath;
};

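// Defines a GPU test that runs a CCPRTest subclass twice: once drawing fills (doStroke=false)
// and once drawing hairline strokes (doStroke=true).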
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, false); \
        test.run(reporter, true); \
    }

class GrCCPRTest_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        ccpr.abandonGrContext();
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        fMockContext.reset();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanup)

class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)

class GrCCPRTest_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.abandonGrContext();
        fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)

class GrCCPRTest_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr; however, we use a back door for
        // testing, so this path will make it through.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It exercises various internal asserts and verifies we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class GrCCPRTest_cache : public CCPRTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        static constexpr int kPathSize = 20;
        SkRandom rand;

        SkPath paths[300];
        int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
        for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
                                                primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
        }

        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };

        int firstAtlasID = -1;

        for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
            static constexpr int kNumHitsBeforeStash = 2;
            static const GrUniqueKey gInvalidUniqueKey;

            // Draw all the paths then flush. Repeat until a new stash occurs.
            const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
            for (int j = 0; j < kNumHitsBeforeStash; ++j) {
                // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
                REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
                    ccpr.drawPath(paths[i], matrices[i % 2]);
                }
                ccpr.flush();

                stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
            }

            // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
            GrMockTextureInfo stashedAtlasInfo;
            stashedAtlasInfo.fID = -1;
            if (stashedAtlasKey->isValid()) {
                GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
                sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
                REPORTER_ASSERT(reporter, stashedAtlas);
                if (stashedAtlas) {
                    const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
                    backendTexture.getMockTextureInfo(&stashedAtlasInfo);
                }
            }

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
                firstAtlasID = stashedAtlasInfo.fID;
                continue;
            }

            switch (iterIdx % 3) {
                case 1:
                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, no atlas should
                    // have been stashed away.
                    REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                    // Invalidate even path masks.
                    matrices[0].preTranslate(1.6f, 1.4f);
                    break;

                case 2:
                    // Even path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Invalidate odd path masks.
                    matrices[1].preTranslate(-1.4f, -1.6f);
                    break;

                case 0:
                    // Odd path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' is the same texture that got stashed away last time (assuming
                    // no assertion failures). So if it also got stashed this time, it means we
                    // first copied the even paths out of it, then recycled the exact same texture
                    // to render the odd paths. This is the expected behavior.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Integer translates: all path masks stay valid.
                    matrices[0].preTranslate(-1, -1);
                    matrices[1].preTranslate(1, 1);
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(GrCCPRTest_cache)

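// Base class for CCPR tests that render through a real GrContext; skipped when CCPR is not
// enabled on the GPU under test.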
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const {
        if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(ctx, reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

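// Defines a rendering test that runs a CCPRRenderingTest subclass on every rendering context,
// once with fills and once with strokes.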
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), false); \
        test.run(reporter, ctxInfo.grContext(), true); \
    }

class GrCCPRTest_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath)