blob: a400985b311a0338368dee4122b75de20b5fca84 [file] [log] [blame]
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "SkTypes.h"
9#include "Test.h"
10
Chris Daltoncc604e52017-10-06 16:27:32 -060011#include "GrContext.h"
12#include "GrContextPriv.h"
13#include "GrClip.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060014#include "GrDrawingManager.h"
15#include "GrPathRenderer.h"
16#include "GrPaint.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060017#include "GrRenderTargetContext.h"
18#include "GrRenderTargetContextPriv.h"
19#include "GrShape.h"
Chris Dalton4da70192018-06-18 09:51:36 -060020#include "GrTexture.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060021#include "SkMatrix.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060022#include "SkPathPriv.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060023#include "SkRect.h"
Chris Dalton4da70192018-06-18 09:51:36 -060024#include "sk_tool_utils.h"
Chris Daltoncc604e52017-10-06 16:27:32 -060025#include "ccpr/GrCoverageCountingPathRenderer.h"
Chris Daltonfddb6c02017-11-04 15:22:22 -060026#include "mock/GrMockTypes.h"
Hal Canary8a001442018-09-19 11:31:27 -040027
Chris Daltoncc604e52017-10-06 16:27:32 -060028#include <cmath>
29
30static constexpr int kCanvasSize = 100;
31
Chris Daltona32a3c32017-12-05 10:05:21 -070032class CCPRClip : public GrClip {
33public:
34 CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}
35
36private:
Robert Phillips777707b2018-01-17 11:40:14 -050037 bool apply(GrContext* context, GrRenderTargetContext* rtc, bool, bool, GrAppliedClip* out,
Chris Daltona32a3c32017-12-05 10:05:21 -070038 SkRect* bounds) const override {
Chris Dalton4c458b12018-06-16 17:22:59 -060039 out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
Chris Daltona32a3c32017-12-05 10:05:21 -070040 SkIRect::MakeWH(rtc->width(), rtc->height()),
Chris Dalton4c458b12018-06-16 17:22:59 -060041 rtc->width(), rtc->height(),
42 *context->contextPriv().caps()));
Chris Daltona32a3c32017-12-05 10:05:21 -070043 return true;
44 }
45 bool quickContains(const SkRect&) const final { return false; }
46 bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
47 void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
48 rect->set(0, 0, width, height);
49 if (iior) {
50 *iior = false;
51 }
52 }
53 GrCoverageCountingPathRenderer* const fCCPR;
54 const SkPath fPath;
55};
56
Chris Daltoncc604e52017-10-06 16:27:32 -060057class CCPRPathDrawer {
58public:
Chris Dalton09a7bb22018-08-31 19:53:15 +080059 CCPRPathDrawer(GrContext* ctx, skiatest::Reporter* reporter, bool doStroke)
Chris Daltoncc604e52017-10-06 16:27:32 -060060 : fCtx(ctx)
Chris Daltonfddb6c02017-11-04 15:22:22 -060061 , fCCPR(fCtx->contextPriv().drawingManager()->getCoverageCountingPathRenderer())
Robert Phillips0c4b7b12018-03-06 08:20:37 -050062 , fRTC(fCtx->contextPriv().makeDeferredRenderTargetContext(
Greg Daniel4065d452018-11-16 15:43:41 -050063 ctx->contextPriv().caps()->getBackendFormatFromColorType(kRGBA_8888_SkColorType),
64 SkBackingFit::kExact, kCanvasSize, kCanvasSize, kRGBA_8888_GrPixelConfig,
65 nullptr))
Chris Dalton09a7bb22018-08-31 19:53:15 +080066 , fDoStroke(doStroke) {
Chris Daltonfddb6c02017-11-04 15:22:22 -060067 if (!fCCPR) {
68 ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
69 }
70 if (!fRTC) {
71 ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
Chris Daltoncc604e52017-10-06 16:27:32 -060072 }
73 }
74
Chris Dalton4da70192018-06-18 09:51:36 -060075 GrContext* ctx() const { return fCtx; }
76 GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }
77
Chris Daltonfddb6c02017-11-04 15:22:22 -060078 bool valid() const { return fCCPR && fRTC; }
Brian Osman9a9baae2018-11-05 15:06:26 -050079 void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
80 GrRenderTargetContext::CanClearFullscreen::kYes); }
Chris Daltonfddb6c02017-11-04 15:22:22 -060081 void abandonGrContext() { fCtx = nullptr; fCCPR = nullptr; fRTC = nullptr; }
Chris Daltoncc604e52017-10-06 16:27:32 -060082
Chris Daltona2b5b642018-06-24 13:08:57 -060083 void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
Chris Daltonfddb6c02017-11-04 15:22:22 -060084 SkASSERT(this->valid());
Chris Daltoncc604e52017-10-06 16:27:32 -060085
Chris Daltoncc604e52017-10-06 16:27:32 -060086 GrPaint paint;
Brian Osmancb3d0872018-10-16 15:19:28 -040087 paint.setColor4f({ 0, 1, 0, 1 });
Chris Daltonfddb6c02017-11-04 15:22:22 -060088
Chris Daltoncc604e52017-10-06 16:27:32 -060089 GrNoClip noClip;
90 SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);
Chris Daltonfddb6c02017-11-04 15:22:22 -060091
Chris Dalton09a7bb22018-08-31 19:53:15 +080092 GrShape shape;
93 if (!fDoStroke) {
94 shape = GrShape(path);
95 } else {
96 // Use hairlines for now, since they are the only stroke type that doesn't require a
97 // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
98 // and regular strokes other than how it decides the device-space stroke width.
99 SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
100 stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
101 shape = GrShape(path, GrStyle(stroke, nullptr));
102 }
Chris Daltonfddb6c02017-11-04 15:22:22 -0600103
Chris Daltona2b5b642018-06-24 13:08:57 -0600104 fCCPR->testingOnly_drawPathDirectly({
105 fCtx, std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
106 &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
Chris Daltoncc604e52017-10-06 16:27:32 -0600107 }
108
Brian Osmancb3d0872018-10-16 15:19:28 -0400109 void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
Chris Daltona32a3c32017-12-05 10:05:21 -0700110 SkASSERT(this->valid());
111
112 GrPaint paint;
113 paint.setColor4f(color);
114
115 fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
116 SkRect::MakeIWH(kCanvasSize, kCanvasSize));
117 }
118
Chris Daltonfddb6c02017-11-04 15:22:22 -0600119 void flush() const {
120 SkASSERT(this->valid());
Chris Daltoncc604e52017-10-06 16:27:32 -0600121 fCtx->flush();
122 }
123
124private:
Chris Dalton4da70192018-06-18 09:51:36 -0600125 GrContext* fCtx;
126 GrCoverageCountingPathRenderer* fCCPR;
127 sk_sp<GrRenderTargetContext> fRTC;
Chris Dalton09a7bb22018-08-31 19:53:15 +0800128 const bool fDoStroke;
Chris Daltoncc604e52017-10-06 16:27:32 -0600129};
130
Chris Daltonfddb6c02017-11-04 15:22:22 -0600131class CCPRTest {
132public:
Chris Dalton09a7bb22018-08-31 19:53:15 +0800133 void run(skiatest::Reporter* reporter, bool doStroke) {
Chris Daltonfddb6c02017-11-04 15:22:22 -0600134 GrMockOptions mockOptions;
135 mockOptions.fInstanceAttribSupport = true;
136 mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
Brian Salomonbdecacf2018-02-02 20:32:49 -0500137 mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
138 GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
Chris Daltonfddb6c02017-11-04 15:22:22 -0600139 mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
Chris Dalton4da70192018-06-18 09:51:36 -0600140 mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
141 GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
142 mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
Chris Daltonfddb6c02017-11-04 15:22:22 -0600143 mockOptions.fGeometryShaderSupport = true;
Chris Daltonfddb6c02017-11-04 15:22:22 -0600144 mockOptions.fIntegerSupport = true;
145 mockOptions.fFlatInterpolationSupport = true;
Chris Daltonfddb6c02017-11-04 15:22:22 -0600146
147 GrContextOptions ctxOptions;
148 ctxOptions.fAllowPathMaskCaching = false;
149 ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;
150
Chris Daltona2b5b642018-06-24 13:08:57 -0600151 this->customizeOptions(&mockOptions, &ctxOptions);
152
Chris Daltonfddb6c02017-11-04 15:22:22 -0600153 fMockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
154 if (!fMockContext) {
155 ERRORF(reporter, "could not create mock context");
156 return;
157 }
158 if (!fMockContext->unique()) {
159 ERRORF(reporter, "mock context is not unique");
160 return;
161 }
162
Chris Dalton09a7bb22018-08-31 19:53:15 +0800163 CCPRPathDrawer ccpr(fMockContext.get(), reporter, doStroke);
Chris Daltonfddb6c02017-11-04 15:22:22 -0600164 if (!ccpr.valid()) {
165 return;
166 }
167
168 fPath.moveTo(0, 0);
169 fPath.cubicTo(50, 50, 0, 50, 50, 0);
170 this->onRun(reporter, ccpr);
Chris Daltoncc604e52017-10-06 16:27:32 -0600171 }
172
Chris Daltonfddb6c02017-11-04 15:22:22 -0600173 virtual ~CCPRTest() {}
174
175protected:
Chris Daltona2b5b642018-06-24 13:08:57 -0600176 virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
Chris Daltonfddb6c02017-11-04 15:22:22 -0600177 virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;
178
Chris Dalton4da70192018-06-18 09:51:36 -0600179 sk_sp<GrContext> fMockContext;
180 SkPath fPath;
Chris Daltonfddb6c02017-11-04 15:22:22 -0600181};
182
Chris Dalton09a7bb22018-08-31 19:53:15 +0800183#define DEF_CCPR_TEST(name) \
Brian Salomondcfca432017-11-15 15:48:03 -0500184 DEF_GPUTEST(name, reporter, /* options */) { \
Chris Dalton09a7bb22018-08-31 19:53:15 +0800185 name test; \
186 test.run(reporter, false); \
187 test.run(reporter, true); \
Chris Daltoncc604e52017-10-06 16:27:32 -0600188 }
189
Chris Daltonfddb6c02017-11-04 15:22:22 -0600190class GrCCPRTest_cleanup : public CCPRTest {
191 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
192 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
Chris Daltoncc604e52017-10-06 16:27:32 -0600193
Chris Daltonfddb6c02017-11-04 15:22:22 -0600194 // Ensure paths get unreffed.
195 for (int i = 0; i < 10; ++i) {
196 ccpr.drawPath(fPath);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600197 }
198 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
199 ccpr.flush();
200 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
201
202 // Ensure clip paths get unreffed.
203 for (int i = 0; i < 10; ++i) {
Chris Daltona32a3c32017-12-05 10:05:21 -0700204 ccpr.clipFullscreenRect(fPath);
Chris Daltonfddb6c02017-11-04 15:22:22 -0600205 }
206 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
207 ccpr.flush();
208 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
209
210 // Ensure paths get unreffed when we delete the context without flushing.
211 for (int i = 0; i < 10; ++i) {
212 ccpr.drawPath(fPath);
Chris Daltona32a3c32017-12-05 10:05:21 -0700213 ccpr.clipFullscreenRect(fPath);
Chris Daltonfddb6c02017-11-04 15:22:22 -0600214 }
215 ccpr.abandonGrContext();
216 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
217 fMockContext.reset();
218 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
219 }
220};
221DEF_CCPR_TEST(GrCCPRTest_cleanup)
222
Chris Dalton91ab1552018-04-18 13:24:25 -0600223class GrCCPRTest_cleanupWithTexAllocFail : public GrCCPRTest_cleanup {
Chris Daltona2b5b642018-06-24 13:08:57 -0600224 void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
225 mockOptions->fFailTextureAllocations = true;
Chris Dalton91ab1552018-04-18 13:24:25 -0600226 }
227};
228DEF_CCPR_TEST(GrCCPRTest_cleanupWithTexAllocFail)
229
Chris Dalton080baa42017-11-06 14:19:19 -0700230class GrCCPRTest_unregisterCulledOps : public CCPRTest {
231 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
232 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
233
234 // Ensure Ops get unregistered from CCPR when culled early.
235 ccpr.drawPath(fPath);
236 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
237 ccpr.clear(); // Clear should delete the CCPR Op.
238 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
239 ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).
240
241 // Ensure Op unregisters work when we delete the context without flushing.
242 ccpr.drawPath(fPath);
243 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
244 ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
245 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
246 ccpr.abandonGrContext();
247 fMockContext.reset(); // Should not crash (DrawPathsOp should have unregistered itself).
248 }
249};
250DEF_CCPR_TEST(GrCCPRTest_unregisterCulledOps)
251
Chris Daltonc9c97b72017-11-27 15:34:26 -0700252class GrCCPRTest_parseEmptyPath : public CCPRTest {
253 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
254 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
255
256 // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
257 // an empty path.
258 SkPath largeOutsidePath;
259 largeOutsidePath.moveTo(-1e30f, -1e30f);
260 largeOutsidePath.lineTo(-1e30f, +1e30f);
261 largeOutsidePath.lineTo(-1e10f, +1e30f);
262 ccpr.drawPath(largeOutsidePath);
263
264 // Normally an empty path is culled before reaching ccpr, however we use a back door for
265 // testing so this path will make it.
266 SkPath emptyPath;
267 SkASSERT(emptyPath.isEmpty());
268 ccpr.drawPath(emptyPath);
269
270 // This is the test. It will exercise various internal asserts and verify we do not crash.
271 ccpr.flush();
Chris Daltona32a3c32017-12-05 10:05:21 -0700272
273 // Now try again with clips.
274 ccpr.clipFullscreenRect(largeOutsidePath);
275 ccpr.clipFullscreenRect(emptyPath);
276 ccpr.flush();
277
278 // ... and both.
279 ccpr.drawPath(largeOutsidePath);
280 ccpr.clipFullscreenRect(largeOutsidePath);
281 ccpr.drawPath(emptyPath);
282 ccpr.clipFullscreenRect(emptyPath);
283 ccpr.flush();
Chris Daltonc9c97b72017-11-27 15:34:26 -0700284 }
285};
286DEF_CCPR_TEST(GrCCPRTest_parseEmptyPath)
287
Chris Dalton4da70192018-06-18 09:51:36 -0600288// This test exercises CCPR's cache capabilities by drawing many paths with two different
289// transformation matrices. We then vary the matrices independently by whole and partial pixels,
290// and verify the caching behaved as expected.
291class GrCCPRTest_cache : public CCPRTest {
Chris Daltona2b5b642018-06-24 13:08:57 -0600292 void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
293 ctxOptions->fAllowPathMaskCaching = true;
294 }
295
Chris Dalton4da70192018-06-18 09:51:36 -0600296 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
297 static constexpr int kPathSize = 20;
298 SkRandom rand;
299
Chris Daltona8429cf2018-06-22 11:43:31 -0600300 SkPath paths[300];
Chris Dalton4da70192018-06-18 09:51:36 -0600301 int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};
302 for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
303 int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
304 GrShape::kMaxKeyFromDataVerbCnt * 2);
305 paths[i] = sk_tool_utils::make_star(SkRect::MakeIWH(kPathSize, kPathSize), numPts,
306 primes[rand.nextU() % SK_ARRAY_COUNT(primes)]);
307 }
308
309 SkMatrix matrices[2] = {
310 SkMatrix::MakeTrans(5, 5),
311 SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
312 };
313
314 int firstAtlasID = -1;
315
Chris Daltona8429cf2018-06-22 11:43:31 -0600316 for (int iterIdx = 0; iterIdx < 10; ++iterIdx) {
317 static constexpr int kNumHitsBeforeStash = 2;
318 static const GrUniqueKey gInvalidUniqueKey;
319
320 // Draw all the paths then flush. Repeat until a new stash occurs.
321 const GrUniqueKey* stashedAtlasKey = &gInvalidUniqueKey;
322 for (int j = 0; j < kNumHitsBeforeStash; ++j) {
323 // Nothing should be stashed until its hit count reaches kNumHitsBeforeStash.
324 REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());
325
326 for (size_t i = 0; i < SK_ARRAY_COUNT(paths); ++i) {
Chris Daltona2b5b642018-06-24 13:08:57 -0600327 ccpr.drawPath(paths[i], matrices[i % 2]);
Chris Daltona8429cf2018-06-22 11:43:31 -0600328 }
329 ccpr.flush();
330
331 stashedAtlasKey = &ccpr.ccpr()->testingOnly_getStashedAtlasKey();
Chris Dalton4da70192018-06-18 09:51:36 -0600332 }
Chris Dalton4da70192018-06-18 09:51:36 -0600333
334 // Figure out the mock backend ID of the atlas texture stashed away by CCPR.
335 GrMockTextureInfo stashedAtlasInfo;
336 stashedAtlasInfo.fID = -1;
Chris Daltona8429cf2018-06-22 11:43:31 -0600337 if (stashedAtlasKey->isValid()) {
Chris Dalton4da70192018-06-18 09:51:36 -0600338 GrResourceProvider* rp = ccpr.ctx()->contextPriv().resourceProvider();
Chris Daltona8429cf2018-06-22 11:43:31 -0600339 sk_sp<GrSurface> stashedAtlas = rp->findByUniqueKey<GrSurface>(*stashedAtlasKey);
Chris Dalton4da70192018-06-18 09:51:36 -0600340 REPORTER_ASSERT(reporter, stashedAtlas);
341 if (stashedAtlas) {
342 const auto& backendTexture = stashedAtlas->asTexture()->getBackendTexture();
343 backendTexture.getMockTextureInfo(&stashedAtlasInfo);
344 }
345 }
346
Chris Daltona8429cf2018-06-22 11:43:31 -0600347 if (0 == iterIdx) {
348 // First iteration: just note the ID of the stashed atlas and continue.
349 REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
Chris Dalton4da70192018-06-18 09:51:36 -0600350 firstAtlasID = stashedAtlasInfo.fID;
351 continue;
352 }
353
Chris Daltona8429cf2018-06-22 11:43:31 -0600354 switch (iterIdx % 3) {
Chris Dalton4da70192018-06-18 09:51:36 -0600355 case 1:
356 // This draw should have gotten 100% cache hits; we only did integer translates
357 // last time (or none if it was the first flush). Therefore, no atlas should
358 // have been stashed away.
Chris Daltona8429cf2018-06-22 11:43:31 -0600359 REPORTER_ASSERT(reporter, !stashedAtlasKey->isValid());
Chris Dalton4da70192018-06-18 09:51:36 -0600360
361 // Invalidate even path masks.
362 matrices[0].preTranslate(1.6f, 1.4f);
363 break;
364
365 case 2:
366 // Even path masks were invalidated last iteration by a subpixel translate. They
367 // should have been re-rendered this time and stashed away in the CCPR atlas.
Chris Daltona8429cf2018-06-22 11:43:31 -0600368 REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
Chris Dalton4da70192018-06-18 09:51:36 -0600369
370 // 'firstAtlasID' should be kept as a scratch texture in the resource cache.
371 REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);
372
373 // Invalidate odd path masks.
374 matrices[1].preTranslate(-1.4f, -1.6f);
375 break;
376
377 case 0:
378 // Odd path masks were invalidated last iteration by a subpixel translate. They
379 // should have been re-rendered this time and stashed away in the CCPR atlas.
Chris Daltona8429cf2018-06-22 11:43:31 -0600380 REPORTER_ASSERT(reporter, stashedAtlasKey->isValid());
Chris Dalton4da70192018-06-18 09:51:36 -0600381
382 // 'firstAtlasID' is the same texture that got stashed away last time (assuming
383 // no assertion failures). So if it also got stashed this time, it means we
384 // first copied the even paths out of it, then recycled the exact same texture
385 // to render the odd paths. This is the expected behavior.
386 REPORTER_ASSERT(reporter, stashedAtlasInfo.fID == firstAtlasID);
387
388 // Integer translates: all path masks stay valid.
389 matrices[0].preTranslate(-1, -1);
390 matrices[1].preTranslate(1, 1);
391 break;
392 }
393 }
394 }
395};
396DEF_CCPR_TEST(GrCCPRTest_cache)
397
Chris Daltondedf8f22018-09-24 20:23:47 -0600398class GrCCPRTest_unrefPerOpListPathsBeforeOps : public CCPRTest {
399 void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
400 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
401 for (int i = 0; i < 10000; ++i) {
402 // Draw enough paths to make the arena allocator hit the heap.
403 ccpr.drawPath(fPath);
404 }
405
406 // Unref the GrCCPerOpListPaths object.
407 auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths();
408 perOpListPathsMap.clear();
409
410 // Now delete the Op and all its draws.
411 REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
412 ccpr.flush();
413 REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
414 }
415};
416DEF_CCPR_TEST(GrCCPRTest_unrefPerOpListPathsBeforeOps)
417
Chris Daltonfddb6c02017-11-04 15:22:22 -0600418class CCPRRenderingTest {
419public:
Chris Dalton09a7bb22018-08-31 19:53:15 +0800420 void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const {
Chris Daltonfddb6c02017-11-04 15:22:22 -0600421 if (!ctx->contextPriv().drawingManager()->getCoverageCountingPathRenderer()) {
422 return; // CCPR is not enabled on this GPU.
423 }
Chris Dalton09a7bb22018-08-31 19:53:15 +0800424 CCPRPathDrawer ccpr(ctx, reporter, doStroke);
Chris Daltonfddb6c02017-11-04 15:22:22 -0600425 if (!ccpr.valid()) {
426 return;
427 }
428 this->onRun(reporter, ccpr);
429 }
430
431 virtual ~CCPRRenderingTest() {}
432
433protected:
434 virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
435};
436
437#define DEF_CCPR_RENDERING_TEST(name) \
438 DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
439 name test; \
Chris Dalton09a7bb22018-08-31 19:53:15 +0800440 test.run(reporter, ctxInfo.grContext(), false); \
441 test.run(reporter, ctxInfo.grContext(), true); \
Chris Daltonfddb6c02017-11-04 15:22:22 -0600442 }
443
444class GrCCPRTest_busyPath : public CCPRRenderingTest {
445 void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
446 static constexpr int kNumBusyVerbs = 1 << 17;
447 ccpr.clear();
448 SkPath busyPath;
449 busyPath.moveTo(0, 0); // top left
450 busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
451 for (int i = 2; i < kNumBusyVerbs; ++i) {
452 float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
453 busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
454 }
455 ccpr.drawPath(busyPath);
456
457 ccpr.flush(); // If this doesn't crash, the test passed.
458 // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
459 // your platform's GrGLCaps.
460 }
461};
462DEF_CCPR_RENDERING_TEST(GrCCPRTest_busyPath)