/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
8#include "SkTypes.h"
9#include "Test.h"
10
Chris Daltoncc604e52017-10-06 16:27:32 -060011#include "GrContext.h"
12#include "GrContextPriv.h"
13#include "GrClip.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060014#include "GrDrawingManager.h"
15#include "GrPathRenderer.h"
16#include "GrPaint.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060017#include "GrRenderTargetContext.h"
18#include "GrRenderTargetContextPriv.h"
19#include "GrShape.h"
Chris Dalton4da70192018-06-18 09:51:36 -060020#include "GrTexture.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060021#include "SkMatrix.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060022#include "SkPathPriv.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060023#include "SkRect.h"
Chris Dalton4da70192018-06-18 09:51:36 -060024#include "sk_tool_utils.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060025#include "ccpr/GrCoverageCountingPathRenderer.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060026#include "mock/GrMockTypes.h"
Hal Canary8a001442018-09-19 11:31:27 -040027
Chris Daltoncc604e52017-10-06 16:27:32 -060028#include <cmath>
29
// Edge length, in pixels, of the square render target that every test in this file draws into.
static constexpr int kCanvasSize = 100;
31
// Test-only GrClip that feeds its path to GrCoverageCountingPathRenderer::makeClipProcessor,
// forcing the clip to be evaluated by CCPR as a coverage fragment processor rather than by the
// regular clip stack.
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    // Installs a CCPR clip processor whose coverage region is the full render target.
    // Always reports success so the draw proceeds with the coverage FP attached.
    bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
               SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->contextPriv().caps()));
        return true;
    }
    // Never claim to trivially contain the query rect or to be a (round) rect; this keeps the
    // coverage-FP path in apply() from being short-circuited.
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    // Conservatively report the whole device as potentially affected by the clip.
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false; // Not an "intersection of rects" clip.
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;
    // Stored by copy; the cleanup tests below rely on SkPath's shared-ref semantics to observe
    // when CCPR releases its references.
    const SkPath fPath;
};
56
// Small harness that wires a GrContext, its coverage-counting path renderer (CCPR), and a
// kCanvasSize x kCanvasSize deferred render target context together, and exposes draw/clip/flush
// entry points for the tests below.
class CCPRPathDrawer {
public:
    // 'doStroke' selects whether drawPath() submits paths as fills or as hairline strokes.
    CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter, bool doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
                                                         SkBackingFit::kExact, kCanvasSize,
                                                         kCanvasSize, kRGBA_8888_GrPixelConfig,
                                                         nullptr))
            , fDoStroke(doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx; }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    // True only if both the CCPR instance and the render target context were created.
    bool valid() const { return fCCPR && fRTC; }
    // Fullscreen clear; the culled-ops test uses this to discard pending CCPR ops.
    void clear() const { fRTC->clear(nullptr, 0, GrRenderTargetContext::CanClearFullscreen::kYes); }
    // Drops all context-derived pointers without flushing, simulating context abandonment.
    void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }

    // Submits 'path' directly to CCPR (bypassing normal path-renderer selection) under 'matrix'.
    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(GrColor4f(0, 1, 0, 1)); // Opaque green.

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape;
        if (!fDoStroke) {
            shape = GrShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    // Draws a fullscreen rect clipped by 'clipPath' via the CCPRClip coverage-FP mechanism.
    void clipFullscreenRect(SkPath clipPath, GrColor4f color = GrColor4f(0, 1, 0, 1)) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    GrContext* fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    sk_sp<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};
129
// Base class for CCPR unit tests that run against a mock GrContext. run() builds a mock backend
// whose capabilities allow CCPR to be chosen as the path renderer, then hands a CCPRPathDrawer
// and a shared test path (fPath) to the subclass's onRun().
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, bool doStroke) {
        // Mock caps below are presumably the minimum CCPR requires (instanced attribs, mappable
        // buffers, renderable/texturable A8 + F16-alpha, geometry shaders, integer + flat
        // interpolation support) — confirm against GrCoverageCountingPathRenderer::IsSupported.
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        // Let the subclass tweak either option set before context creation.
        this->customizeOptions(&mockOptions, &ctxOptions);

        fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!fMockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!fMockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(fMockContext.get(), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        // A small cubic path shared by the tests (its refcount is what the cleanup tests watch).
        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    // Hook for subclasses to adjust mock caps or context options before the context is made.
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    // The actual test body, supplied by each subclass.
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    sk_sp<GrContext> fMockContext;
    SkPath fPath;
};
181
// Registers a GPU test that instantiates 'name' and runs it twice: once with fills
// (doStroke=false) and once with hairline strokes (doStroke=true).
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, false); \
        test.run(reporter, true); \
    }
188
// Verifies CCPR releases its references to draw paths and clip paths at flush time, and also
// when the context is destroyed without ever flushing.
class GrCCPRTest_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        // fPath starts with a single (unique) reference.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        ccpr.abandonGrContext();
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        fMockContext.reset(); // Destroying the context must drop the outstanding path refs.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(GrCCPRTest_cleanup)
221
Chris Dalton91ab1552018-04-18 13:24:25 -0600222class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
Chris Daltona2b5b642018-06-24 13:08:57 -0600223 void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
224 mockOptions->fFailTextureAllocations = true;
Chris Dalton91ab1552018-04-18 13:24:25 -0600225 }
226};
227DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)
228
// Verifies that CCPR draw ops unregister themselves (releasing their path refs) when they are
// culled early by a fullscreen clear, both before a flush and before context teardown.
class GrCCPRTest_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.abandonGrContext();
        fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)
250
Chris Daltonc9c97b72017-11-27 15:34:26 -0700251class GrCCPRTest_parseEmptyPath : public CCPRTest {
252 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
253 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
254
255 // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
256 // an empty path.
257 SkPath largeOutsidePath;
258 largeOutsidePath.moveTo(-1e30f, -1e30f);
259 largeOutsidePath.lineTo(-1e30f, +1e30f);
260 largeOutsidePath.lineTo(-1e10f, +1e30f);
261 ccpr.drawPath(largeOutsidePath);
262
263 // Normally an empty path is culled before reaching ccpr, however we use a back door for
264 // testing so this path will make it.
265 SkPath emptyPath;
266 SkASSERT(emptyPath.isEmpty());
267 ccpr.drawPath(emptyPath);
268
269 // This is the test. It will exercise various internal asserts and verify we do not crash.
270 ccpr.flush();
Chris Daltona32a3c32017-12-05 10:05:21 -0700271
272 // Now try again with clips.
273 ccpr.clipFullscreenRect(largeOutsidePath);
274 ccpr.clipFullscreenRect(emptyPath);
275 ccpr.flush();
276
277 // ... and both.
278 ccpr.drawPath(largeOutsidePath);
279 ccpr.clipFullscreenRect(largeOutsidePath);
280 ccpr.drawPath(emptyPath);
281 ccpr.clipFullscreenRect(emptyPath);
282 ccpr.flush();
Chris Daltonc9c97b72017-11-27 15:34:26 -0700283 }
284};
285DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath)
286
// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class GrCCPRTest_cache : public CCPRTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        // Unlike the other tests, path mask caching must be on for this one.
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        static constexpr int kPathSize = 20;
        SkRandom rand;

        // Build 300 star paths with enough verbs that GrShape keys them by data. Even-indexed
        // paths will use matrices[0], odd-indexed paths matrices[1] (see the draw loop below).
        SkPath paths[300];
        int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
        for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
                                                primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
        }

        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };

        // Mock backend ID of the first atlas CCPR stashes; -1 until the first stash is seen.
        int firstAtlasID = -1;

        for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
            static constexpr int kNumHitsBeforeStash = 2;
            static const GrUniqueKey gInvalidUniqueKey;

            // Draw all the paths then flush. Repeat until a new stash occurs.
            const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
            for (int j = 0; j < kNumHitsBeforeStash; ++j) {
                // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
                REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
                    ccpr.drawPath(paths[i], matrices[i % 2]);
                }
                ccpr.flush();

                stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
            }

            // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
            GrMockTextureInfo stashedAtlasInfo;
            stashedAtlasInfo.fID = -1;
            if (stashedAtlasKey->isValid()) {
                GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
                sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
                REPORTER_ASSERT(reporter, stashedAtlas);
                if (stashedAtlas) {
                    const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
                    backendTexture.getMockTextureInfo(&stashedAtlasInfo);
                }
            }

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
                firstAtlasID = stashedAtlasInfo.fID;
                continue;
            }

            // Iterations cycle through three states: full cache hit (case 1), even masks
            // re-rendered (case 2), odd masks re-rendered (case 0).
            switch (iterIdx % 3) {
                case 1:
                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, no atlas should
                    // have been stashed away.
                    REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());

                    // Invalidate even path masks.
                    matrices[0].preTranslate(1.6f, 1.4f);
                    break;

                case 2:
                    // Even path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Invalidate odd path masks.
                    matrices[1].preTranslate(-1.4f, -1.6f);
                    break;

                case 0:
                    // Odd path masks were invalidated last iteration by a subpixel translate. They
                    // should have been re-rendered this time and stashed away in the CCPR atlas.
                    REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());

                    // 'firstAtlasID' is the same texture that got stashed away last time (assuming
                    // no assertion failures). So if it also got stashed this time, it means we
                    // first copied the even paths out of it, then recycled the exact same texture
                    // to render the odd paths. This is the expected behavior.
                    REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);

                    // Integer translates: all path masks stay valid.
                    matrices[0].preTranslate(-1, -1);
                    matrices[1].preTranslate(1, 1);
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(GrCCPRTest_cache)
396
// Verifies that releasing the GrCCPerOpListPaths map before the ops themselves are deleted does
// not crash and still results in every path ref being dropped by flush time.
class GrCCPRTest_unrefPerOpListPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpListPaths object.
        auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpListPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(GrCCPRTest_unrefPerOpListPathsBeforeOps)
416
// Base class for CCPR tests that run on real rendering contexts (rather than the mock context
// used by CCPRTest). Skips silently when CCPR is not enabled on the GPU under test.
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const {
        if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(ctx, reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    // The actual test body, supplied by each subclass.
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};
435
// Registers a rendering-context GPU test that instantiates 'name' and runs it twice: once with
// fills (doStroke=false) and once with hairline strokes (doStroke=true).
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), false); \
        test.run(reporter, ctxInfo.grContext(), true); \
    }
442
443class GrCCPRTest_busyPath : public CCPRRenderingTest {
444 void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
445 static constexpr int kNumBusyVerbs = 1 << 17;
446 ccpr.clear();
447 SkPath busyPath;
448 busyPath.moveTo(0, 0); // top left
449 busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
450 for (int i = 2; i < kNumBusyVerbs; ++i) {
451 float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
452 busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
453 }
454 ccpr.drawPath(busyPath);
455
456 ccpr.flush(); // If this doesn't crash, the test passed.
457 // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
458 // your platform's GrGLCaps.
459 }
460};
461DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath)