/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"
#include "tests/Test.h"

#include "include/core/SkMatrix.h"
#include "include/core/SkRect.h"
#include "include/gpu/mock/GrMockTypes.h"
#include "include/private/GrRecordingContext.h"
#include "src/core/SkExchange.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrPaint.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrRenderTargetContextPriv.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "tools/ToolUtils.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

enum class DoCoverageCount { kNo = false, kYes };
enum class DoStroke { kNo = false, kYes };

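// Test-only clip that adds the given path as a CCPR clip processor over the entire render
// target. It reports conservative bounds equal to the full RT and never claims to be a rect or
// rrect, so the clip cannot be optimized away before reaching CCPR.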
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    bool apply(GrRecordingContext* context, GrRenderTargetContext* rtc, bool useHWAA,
               bool hasUserStencilSettings, GrAppliedClip* out, SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(rtc->priv().testingOnly_getOpsTaskID(), fPath,
                                                    SkIRect::MakeWH(rtc->width(), rtc->height()),
                                                    *context->priv().caps()));
        return true;
    }
    bool quickContains(const SkRect&) const final { return false; }
    bool isRRect(const SkRect& rtBounds, SkRRect* rr, GrAA*) const final { return false; }
    void getConservativeBounds(int width, int height, SkIRect* rect, bool* iior) const final {
        rect->setWH(width, height);
        if (iior) {
            *iior = false;
        }
    }
    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

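// Test harness around a GrContext and GrRenderTargetContext that draws paths (filled or
// hairline-stroked) straight through CCPR's testing-only entry point, applies CCPR clips, and
// flushes on demand.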
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrContext> ctx, skiatest::Reporter* reporter, DoStroke doStroke)
            : fCtx(ctx)
            , fCCPR(fCtx->priv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(GrRenderTargetContext::Make(
                      fCtx.get(), GrColorType::kRGBA_8888, nullptr, SkBackingFit::kExact,
                      {kCanvasSize, kCanvasSize}))
            , fDoStroke(DoStroke::kYes == doStroke) {
        if (!fCCPR) {
            ERRORF(reporter, "ccpr not enabled in GrContext for ccpr tests");
        }
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrRenderTargetContext for ccpr tests");
        }
    }

    GrContext* ctx() const { return fCtx.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(nullptr, SK_PMColor4fTRANSPARENT,
                                     GrRenderTargetContext::CanClearFullscreen::kYes); }
    void destroyGrContext() {
        SkASSERT(fCtx->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fCtx.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        GrNoClip noClip;
        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrStyledShape shape;
        if (!fDoStroke) {
            shape = GrStyledShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrStyledShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fCtx.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(), &noClip,
                &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        fRTC->drawRect(CCPRClip(fCCPR, clipPath), std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fCtx->flush();
    }

private:
    sk_sp<GrContext> fCtx;
    GrCoverageCountingPathRenderer* fCCPR;
    std::unique_ptr<GrRenderTargetContext> fRTC;
    const bool fDoStroke;
};

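// Base fixture for the mock-context unit tests below: configures a mock GrContext with caps
// suitable for CCPR, restricts path rendering to GpuPathRenderers::kCoverageCounting, and hands
// the subclass a CCPRPathDrawer plus a simple cubic test path via onRun().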
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, DoCoverageCount doCoverageCount, DoStroke doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fDrawInstancedSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fTexturable = true;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fDisableCoverageCountingPaths = (DoCoverageCount::kNo == doCoverageCount);
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrContext> mockContext = GrContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(skstd::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

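// Runs the given test class under each currently-supported DoCoverageCount/DoStroke combination.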
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kNo); \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kYes); \
        test.run(reporter, DoCoverageCount::kNo, DoStroke::kNo); \
        /* FIXME: test.run(reporter, DoCoverageCount::kNo, DoStroke::kYes) once supported. */ \
    }

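// Ensures CCPR releases its path refs after flushing, after clipping, and when the context is
// destroyed with work still pending.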
class CCPR_cleanup : public CCPRTest {
protected:
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

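// Same as CCPR_cleanup, but with texture allocations forced to fail.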
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        ((GrRecordingContext*)ccpr.ctx())->priv().incrSuppressWarningMessages();
        this->CCPR_cleanup::onRun(reporter, ccpr);
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

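// Ensures draw Ops that get culled before flushing (e.g. by a fullscreen clear) unregister
// themselves from CCPR and release their path refs.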
class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

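// Ensures CCPR can parse paths that crop down to nothing, as well as genuinely empty paths,
// without tripping internal asserts or crashing.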
class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath;
        largeOutsidePath.moveTo(-1e30f, -1e30f);
        largeOutsidePath.lineTo(-1e30f, +1e30f);
        largeOutsidePath.lineTo(-1e10f, +1e30f);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

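// Returns the ID of a texture's mock backend object, or 0 if the backing texture is invalid.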
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.id();
}

// Base class for cache path unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, const uint32_t* opsTaskIDs,
                      int numOpsTaskIDs) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, const uint32_t*, int) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

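    // Builds 350 star paths with verb counts above GrStyledShape::kMaxKeyFromDataVerbCnt
    // (presumably so their cache keys cannot be derived from the raw path data), using varying
    // point counts and step sizes.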
    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrStyledShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrStyledShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = ToolUtils::make_star(SkRect::MakeLTRB(0, 0, 1, 1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.ctx()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.ctx()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

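// Ensures the path cache recycles the same GrCCPathCacheEntry objects (reusing the same pointers)
// when path masks are invalidated by a new matrix.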
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeTrans(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5,-.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::MakeScale(kCanvasSize/2, kCanvasSize/2), // Fully visible.
            SkMatrix::MakeScale(kCanvasSize * 1.25, kCanvasSize * 1.25), // Mostly visible.
            SkMatrix::MakeScale(kCanvasSize * 1.5, kCanvasSize * 1.5), // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::MakeScale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int i = 0; i < 10; ++i) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.ctx()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::MakeScale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, ccpr.ccpr()->coverageType()
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = std::min(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::MakeScale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::MakeScale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::MakeTrans(5, 5),
            SkMatrix::MakeTrans(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

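// Ensures path refs are still released if the detached GrCCPerOpsTaskPaths map is dropped before
// its Ops ever execute.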
class CCPR_unrefPerOpsTaskPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpsTaskPaths object.
        auto perOpsTaskPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpsTaskPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpsTaskPathsBeforeOps)

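// Base class for tests that draw through CCPR on real rendering contexts (via
// DEF_GPUTEST_FOR_RENDERING_CONTEXTS) rather than on a mock context.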
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrContext* ctx, DoStroke doStroke) const {
        if (auto ccpr = ctx->priv().drawingManager()->getCoverageCountingPathRenderer()) {
            if (DoStroke::kYes == doStroke &&
                GrCCAtlas::CoverageType::kA8_Multisample == ccpr->coverageType()) {
                return; // Stroking is not yet supported for multisample.
            }
            CCPRPathDrawer drawer(sk_ref_sp(ctx), reporter, doStroke);
            if (!drawer.valid()) {
                return;
            }
            this->onRun(reporter, drawer);
        }
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kNo); \
        test.run(reporter, ctxInfo.grContext(), DoStroke::kYes); \
    }

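// Stress test: draws a single path with 2^17 verbs to verify that very large instance counts
// flush without crashing.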
class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPath busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath);

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)