/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"
#include "Test.h"

#include "GrClip.h"
#include "GrContextPriv.h"
#include "GrDrawingManager.h"
#include "GrPathRenderer.h"
#include "GrPaint.h"
#include "GrRecordingContext.h"
#include "GrRecordingContextPriv.h"
#include "GrRenderTargetContext.h"
#include "GrRenderTargetContextPriv.h"
#include "GrShape.h"
#include "GrTexture.h"
#include "SkExchange.h"
#include "SkMatrix.h"
#include "SkPathPriv.h"
#include "SkRect.h"
#include "sk_tool_utils.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"
#include "ccpr/GrCCPathCache.h"
#include "mock/GrMockTypes.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

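// Test-only GrClip that routes its clip path through CCPR's clip processor, so these tests can
// exercise CCPR's clip path handling.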
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrRecordingContext* context, GrRenderTargetContext* rtc, bool useHWAA,
               bool hasUserStencilSettings, GrAppliedClip* out, SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpListID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    rtc->width(), rtc->height(),
                                                    *context->priv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->set(0, 0, width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

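// Helper that draws paths and clips directly through CCPR's testing-only entry points, bypassing
// normal path renderer selection.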
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, bool doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->priv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(fCtx->priv().makeDeferredRenderTargetContext(
                      ctx->priv().caps()->getBackendFormatFromColorType(kRGBA_8888_SkColorType),
                      SkBackingFit::kExact, kCanvasSize, kCanvasSize, kRGBA_8888_GrPixelConfig,
                      nullptr))
            , fDoStroke(doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
                                     GrRenderTargetContext::CanClearFullscreen::kYes); }
    void destroyGrContext() {
        SkASSERT(fRTC->unique());
        SkASSERT(fCtx->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fCtx.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrShape shape;
        if (!fDoStroke) {
            shape = GrShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    sk_sp<GrContext> fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    sk_sp<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

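// Base fixture for CCPR unit tests. Creates a mock GrContext whose only path renderer is CCPR,
// builds a simple cubic test path, then invokes the subclass's onRun().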
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, bool doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fInstanceAttribSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_half_GrPixelConfig].fTexturable = true;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[kAlpha_8_GrPixelConfig].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrContext> mockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(skstd::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

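// Registers a GPU unit test that runs the given CCPRTest subclass twice: once with filled paths
// and once with stroked (hairline) paths.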
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, false); \
        test.run(reporter, true); \
    }

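// Ensures SkPaths drawn and clipped through CCPR get unreffed, whether the work is flushed or the
// context is destroyed first.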
class CCPR_cleanup : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

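// Same as CCPR_cleanup, but with texture allocations failing, to ensure paths still get unreffed.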
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

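// Returns the ID of the mock backend texture backing the given GrTexture, or 0 if the backing
// texture is invalid.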
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.fID;
}

// Base class for path cache unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                      SkTArray<sk_sp<GrRenderTargetContext>>* out) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = sk_tool_utils::make_star(SkRect::MakeLTRB(0,0,1,1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.ctx()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

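// Ensures path cache entries get recycled (rather than reallocated) when paths are re-drawn with
// a new matrix.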
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::MakeScale(kCanvasSize/2, kCanvasSize/2), // Fully visible.
            SkMatrix::MakeScale(kCanvasSize * 1.25, kCanvasSize * 1.25), // Mostly visible.
            SkMatrix::MakeScale(kCanvasSize * 1.5, kCanvasSize * 1.5), // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeScale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int i = 0; i < 10; ++i) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::MakeScale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, CoverageType::kFP16_CoverageCount
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = SkTMin(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::MakeScale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::MakeScale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

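// Ensures the GrCCPerOpListPaths objects (and the paths they hold) can be unreffed before their
// ops execute, without crashing or leaking path refs.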
class CCPR_unrefPerOpListPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpListPaths object.
        auto perOpListPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpListPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpListPathsBeforeOps)

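// Base fixture for CCPR tests that render on a real (non-mock) context. Skips itself if CCPR is
// not enabled on the GPU under test.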
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, bool doStroke) const {
        if (!ctx->priv().drawingManager()->getCoverageCountingPathRenderer()) {
            return; // CCPR is not enabled on this GPU.
        }
        CCPRPathDrawer ccpr(sk_ref_sp(ctx), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), false); \
        test.run(reporter, ctxInfo.grContext(), true); \
    }

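// Stress test: draws a path with a very large number of verbs to make sure huge instance counts
// don't crash.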
class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)