/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"
#include "tests/Test.h"

#include "include/core/SkMatrix.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/GrRecordingContext.h"
#include "include/gpu/mock/GrMockTypes.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrPaint.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrRenderTargetContextPriv.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "tools/ToolUtils.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

enum class DoCoverageCount { kNo = false, kYes };
enum class DoStroke { kNo = false, kYes };

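// Test-only GrClip implementation that applies the given path as a coverage clip through CCPR's
// clip processor (GrCoverageCountingPathRenderer::makeClipProcessor).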
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    SkIRect getConservativeBounds() const final { return fPath.getBounds().roundOut(); }
    Effect apply(GrRecordingContext* context, GrRenderTargetContext* rtc, bool useHWAA,
                 bool hasUserStencilSettings, GrAppliedClip* out,
                 SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(/*inputFP=*/nullptr,
                                                    rtc->priv().testingOnly_getOpsTaskID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    *context->priv().caps()));
        return Effect::kClipped;
    }

    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

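// Test harness that owns the GrContext and a GrRenderTargetContext and feeds paths directly into
// CCPR, via testingOnly_drawPathDirectly() for draws and the CCPRClip above for clips.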
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, DoStroke doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->priv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(GrRenderTargetContext::Make(
                      fCtx.get(), GrColorType::kRGBA_8888, nullptr, SkBackingFit::kExact,
                      {kCanvasSize, kCanvasSize}))
            , fDoStroke(DoStroke::kYes == doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(SK_PMColor4fTRANSPARENT); }
    void destroyGrContext() {
        SkASSERT(fCtx->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fCtx.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrStyledShape shape;
        if (!fDoStroke) {
            shape = GrStyledShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrStyledShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), nullptr,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        CCPRClip clip(fCCPR, clipPath);
        fRTC->drawRect(&clip, std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flushAndSubmit();
    }

private:
    sk_sp<GrContext> fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    std::unique_ptr<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

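// Base class for CCPR unit tests. run() builds a mock GrContext with CCPR as the only GPU path
// renderer, initializes fPath with a simple cubic, and hands a CCPRPathDrawer to onRun().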
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, DoCoverageCount doCoverageCount, DoStroke doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fDrawInstancedSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fTexturable = true;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fDisableCoverageCountingPaths = (DoCoverageCount::kNo == doCoverageCount);
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrDirectContext> mockContext = GrDirectContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(std::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

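// Instantiates a CCPRTest subclass and runs it in coverage-count mode (fills and strokes) and in
// non-coverage-count (MSAA) mode; see the FIXME below for the remaining combination.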
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kNo); \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kYes); \
        test.run(reporter, DoCoverageCount::kNo, DoStroke::kNo); \
        /* FIXME: test.run(reporter, DoCoverageCount::kNo, DoStroke::kYes) once supported. */ \
    }

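// Verifies that SkPaths drawn and clipped through CCPR are unreffed after a flush, and also when
// the context is destroyed without ever flushing.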
class CCPR_cleanup : public CCPRTest {
protected:
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        ((GrRecordingContext*)ccpr.ctx())->priv().incrSuppressWarningMessages();
        this->CCPR_cleanup::onRun(reporter, ccpr);
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

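// Returns the mock backend's ID for this texture, or 0 if the backing texture is invalid.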
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.id();
}

// Base class for cache path unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                      int numOpsTaskIDs) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrStyledShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrStyledShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = ToolUtils::make_star(SkRect::MakeLTRB(0, 0, 1, 1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.ctx()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

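// Ensures the path cache recycles the same entry objects (the LRU keeps the same pointers) as the
// matrix animates and previously cached masks are invalidated.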
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::Scale(kCanvasSize/2, kCanvasSize/2), // Fully visible.
            SkMatrix::Scale(kCanvasSize * 1.25, kCanvasSize * 1.25), // Mostly visible.
            SkMatrix::Scale(kCanvasSize * 1.5, kCanvasSize * 1.5), // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Scale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int i = 0; i < 10; ++i) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::Scale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, ccpr.ccpr()->coverageType()
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = std::min(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::Scale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::Scale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::Translate(5, 5),
            SkMatrix::Translate(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

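// Ensures path refs are released if the pending GrCCPerOpsTaskPaths map is dropped before its ops
// ever execute.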
class CCPR_unrefPerOpsTaskPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpsTaskPaths object.
        auto perOpsTaskPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpsTaskPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpsTaskPathsBeforeOps)

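// Base class for CCPR tests that render on real GPU contexts (via
// DEF_GPUTEST_FOR_RENDERING_CONTEXTS) rather than the mock context used by CCPRTest above.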
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, DoStroke doStroke) const {
        if (auto ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer()) {
            if (DoStroke::kYes == doStroke &&
                GrCCAtlas::CoverageType::kA8_Multisample == ccpr->coverageType()) {
                return; // Stroking is not yet supported for multisample.
            }
            CCPRPathDrawer drawer(sk_ref_sp(ctx), reporter, doStroke);
            if (!drawer.valid()) {
                return;
            }
            this->onRun(reporter, drawer);
        }
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.directContext(), DoStroke::kNo); \
        test.run(reporter, ctxInfo.directContext(), DoStroke::kYes); \
    }

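// Draws a single path with 2^17 verbs to stress the instance buffers; the test passes as long as
// the flush does not crash.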
class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)