/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"
#include "tests/Test.h"

#include "include/core/SkMatrix.h"
#include "include/core/SkRect.h"
#include "include/gpu/mock/GrMockTypes.h"
#include "include/private/GrRecordingContext.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrPaint.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrRenderTargetContextPriv.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "tools/ToolUtils.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

enum class DoCoverageCount { kNo = false, kYes };
enum class DoStroke { kNo = false, kYes };

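// Test-only GrClip implementation that routes its clip path through CCPR's clip processor, so the
// tests below exercise the clipping code path in addition to the regular draw path.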
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrRecordingContext* context, GrRenderTargetContext* rtc, bool useHWAA,
               bool hasUserStencilSettings, GrAppliedClip* out, SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpsTaskID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    *context->priv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }

    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

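// Thin wrapper around a GrContext and a GrRenderTargetContext that lets the tests draw, clip, and
// flush paths directly through GrCoverageCountingPathRenderer.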
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, DoStroke doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->priv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(GrRenderTargetContext::Make(
                      fCtx.get(), GrColorType::kRGBA_8888, nullptr, SkBackingFit::kExact,
                      {kCanvasSize, kCanvasSize}))
            , fDoStroke(DoStroke::kYes == doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
                                     GrRenderTargetContext::CanClearFullscreen::kYes); }
    void destroyGrContext() {
        SkASSERT(fCtx->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fCtx.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrStyledShape shape;
        if (!fDoStroke) {
            shape = GrStyledShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrStyledShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flushAndSubmit();
    }

private:
    sk_sp<GrContext> fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    std::unique_ptr<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

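// Base class for the CCPR unit tests below. run() builds a mock GrContext configured for coverage
// counting, wraps it in a CCPRPathDrawer, and invokes the subclass's onRun() with a simple cubic
// path in fPath.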
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, DoCoverageCount doCoverageCount, DoStroke doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fDrawInstancedSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fTexturable = true;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fDisableCoverageCountingPaths = (DoCoverageCount::kNo == doCoverageCount);
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrContext> mockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(std::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

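// Defines a GPU unit test that runs a CCPRTest subclass three times: coverage counting with
// fills, coverage counting with strokes, and coverage counting disabled with fills (the stroked
// case without coverage counting is not supported yet; see the FIXME inside the macro).
//
// A minimal, hypothetical test defined with this macro might look like:
//
//     class CCPR_example : public CCPRTest {
//         void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
//             ccpr.drawPath(fPath);
//             ccpr.flush();
//         }
//     };
//     DEF_CCPR_TEST(CCPR_example)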
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kNo); \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kYes); \
        test.run(reporter, DoCoverageCount::kNo, DoStroke::kNo); \
        /* FIXME: test.run(reporter, DoCoverageCount::kNo, DoStroke::kYes) once supported. */ \
    }

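// Verifies that CCPR releases its path refs: after drawing and clipping with a path, the path
// should become unique again once the work is flushed, or once the context is destroyed without
// flushing.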
class CCPR_cleanup : public CCPRTest {
protected:
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

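// Same cleanup expectations as CCPR_cleanup, but with every texture allocation failing.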
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        ((GrRecordingContext*)ccpr.ctx())->priv().incrSuppressWarningMessages();
        this->CCPR_cleanup::onRun(reporter, ccpr);
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

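// Verifies that draw ops culled before a flush (e.g. by a fullscreen clear) unregister themselves
// from CCPR instead of leaving dangling path references.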
class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

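// Returns the mock backend texture ID for 'texture', or 0 if the backing texture is invalid.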
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.id();
}

// Base class for cache path unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                      int numOpsTaskIDs) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrStyledShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrStyledShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = ToolUtils::make_star(SkRect::MakeLTRB(0, 0, 1, 1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.ctx()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

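// Ensures the path cache recycles the same entry objects when the matrix animates, rather than
// growing the LRU with new entries.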
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::Scale(kCanvasSize/2, kCanvasSize/2),  // Fully visible.
            SkMatrix::Scale(kCanvasSize * 1.25, kCanvasSize * 1.25),  // Mostly visible.
            SkMatrix::Scale(kCanvasSize * 1.5, kCanvasSize * 1.5),  // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Scale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int i = 0; i < 10; ++i) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::Scale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, ccpr.ccpr()->coverageType()
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = std::min(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::Scale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::Scale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::Translate(5, 5),
            SkMatrix::Translate(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. On the third hit (now) they
                        // should be copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

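// Verifies that the GrCCPerOpsTaskPaths objects (and the path refs they hold) can be released
// before their ops execute, and that the subsequent flush still cleans up correctly.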
class CCPR_unrefPerOpsTaskPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpsTaskPaths object.
        auto perOpsTaskPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpsTaskPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpsTaskPathsBeforeOps)

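// Base class for CCPR tests that render with a real (non-mock) GrContext supplied by the test
// harness. Subclasses implement onRun() and are registered via DEF_CCPR_RENDERING_TEST.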
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, DoStroke doStroke) const {
        if (auto ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer()) {
            if (DoStroke::kYes == doStroke &&
                GrCCAtlas::CoverageType::kA8_Multisample == ccpr->coverageType()) {
                return; // Stroking is not yet supported for multisample.
            }
            CCPRPathDrawer drawer(sk_ref_sp(ctx), reporter, doStroke);
            if (!drawer.valid()) {
                return;
            }
            this->onRun(reporter, drawer);
        }
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kNo); \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kYes); \
    }

class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)