/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTypes.h"
#include "tests/Test.h"

#include "include/core/SkMatrix.h"
#include "include/core/SkPathBuilder.h"
#include "include/core/SkRect.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/GrRecordingContext.h"
#include "include/gpu/mock/GrMockTypes.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrDrawingManager.h"
#include "src/gpu/GrPaint.h"
#include "src/gpu/GrPathRenderer.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/ccpr/GrCCPathCache.h"
#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "tools/ToolUtils.h"

#include <cmath>

static constexpr int kCanvasSize = 100;

enum class DoCoverageCount { kNo = false, kYes };
enum class DoStroke { kNo = false, kYes };

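// Test-only GrClip implementation that applies the given path as a CCPR clip, by routing it
// through GrCoverageCountingPathRenderer::makeClipProcessor.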
class CCPRClip : public GrClip {
public:
    CCPRClip(GrCoverageCountingPathRenderer* ccpr, const SkPath& path) : fCCPR(ccpr), fPath(path) {}

private:
    SkIRect getConservativeBounds() const final { return fPath.getBounds().roundOut(); }
    Effect apply(GrRecordingContext* context, GrSurfaceDrawContext* rtc, GrAAType,
                 bool hasUserStencilSettings, GrAppliedClip* out,
                 SkRect* bounds) const override {
        out->addCoverageFP(fCCPR->makeClipProcessor(
                /*inputFP=*/nullptr, rtc->getOpsTask()->uniqueID(), fPath,
                SkIRect::MakeWH(rtc->width(), rtc->height()), *context->priv().caps()));
        return Effect::kClipped;
    }

    GrCoverageCountingPathRenderer* const fCCPR;
    const SkPath fPath;
};

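// Helper that owns the GrDirectContext and GrSurfaceDrawContext for a test, and draws paths
// directly through CCPR's testing-only entry points.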
class CCPRPathDrawer {
public:
    CCPRPathDrawer(sk_sp<GrDirectContext> dContext, skiatest::Reporter* reporter, DoStroke doStroke)
            : fDContext(dContext)
            , fCCPR(fDContext->priv().drawingManager()->getCoverageCountingPathRenderer())
            , fRTC(GrSurfaceDrawContext::Make(
                      fDContext.get(), GrColorType::kRGBA_8888, nullptr, SkBackingFit::kExact,
                      {kCanvasSize, kCanvasSize}))
            , fDoStroke(DoStroke::kYes == doStroke) {
        if (!fRTC) {
            ERRORF(reporter, "failed to create GrSurfaceDrawContext for ccpr tests");
        }
    }

    GrDirectContext* dContext() const { return fDContext.get(); }
    GrCoverageCountingPathRenderer* ccpr() const { return fCCPR; }

    bool valid() const { return fCCPR && fRTC; }
    void clear() const { fRTC->clear(SK_PMColor4fTRANSPARENT); }
    void destroyGrContext() {
        SkASSERT(fDContext->unique());
        fRTC.reset();
        fCCPR = nullptr;
        fDContext.reset();
    }

    void drawPath(const SkPath& path, const SkMatrix& matrix = SkMatrix::I()) const {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f({ 0, 1, 0, 1 });

        SkIRect clipBounds = SkIRect::MakeWH(kCanvasSize, kCanvasSize);

        GrStyledShape shape;
        if (!fDoStroke) {
            shape = GrStyledShape(path);
        } else {
            // Use hairlines for now, since they are the only stroke type that doesn't require a
            // rigid-body transform. The CCPR stroke code makes no distinction between hairlines
            // and regular strokes other than how it decides the device-space stroke width.
            SkStrokeRec stroke(SkStrokeRec::kHairline_InitStyle);
            stroke.setStrokeParams(SkPaint::kRound_Cap, SkPaint::kMiter_Join, 4);
            shape = GrStyledShape(path, GrStyle(stroke, nullptr));
        }

        fCCPR->testingOnly_drawPathDirectly({
                fDContext.get(), std::move(paint), &GrUserStencilSettings::kUnused, fRTC.get(),
                nullptr, &clipBounds, &matrix, &shape, GrAAType::kCoverage, false});
    }

    void clipFullscreenRect(SkPath clipPath, SkPMColor4f color = { 0, 1, 0, 1 }) {
        SkASSERT(this->valid());

        GrPaint paint;
        paint.setColor4f(color);

        CCPRClip clip(fCCPR, clipPath);
        fRTC->drawRect(&clip, std::move(paint), GrAA::kYes, SkMatrix::I(),
                       SkRect::MakeIWH(kCanvasSize, kCanvasSize));
    }

    void flush() const {
        SkASSERT(this->valid());
        fDContext->flushAndSubmit();
    }

private:
    sk_sp<GrDirectContext> fDContext;
    GrCoverageCountingPathRenderer* fCCPR;
    std::unique_ptr<GrSurfaceDrawContext> fRTC;
    const bool fDoStroke;
};

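// Base class for CCPR unit tests. Sets up a mock context configured so paths are drawn with the
// coverage counting path renderer, then hands control to onRun().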
class CCPRTest {
public:
    void run(skiatest::Reporter* reporter, DoCoverageCount doCoverageCount, DoStroke doStroke) {
        GrMockOptions mockOptions;
        mockOptions.fDrawInstancedSupport = true;
        mockOptions.fHalfFloatVertexAttributeSupport = true;
        mockOptions.fMapBufferFlags = GrCaps::kCanMap_MapFlag;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kNonMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_F16].fTexturable = true;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fRenderability =
                GrMockOptions::ConfigOptions::Renderability::kMSAA;
        mockOptions.fConfigOptions[(int)GrColorType::kAlpha_8].fTexturable = true;
        mockOptions.fGeometryShaderSupport = true;
        mockOptions.fIntegerSupport = true;
        mockOptions.fFlatInterpolationSupport = true;

        GrContextOptions ctxOptions;
        ctxOptions.fDisableCoverageCountingPaths = (DoCoverageCount::kNo == doCoverageCount);
        ctxOptions.fAllowPathMaskCaching = false;
        ctxOptions.fGpuPathRenderers = GpuPathRenderers::kCoverageCounting;

        this->customizeOptions(&mockOptions, &ctxOptions);

        sk_sp<GrDirectContext> mockContext = GrDirectContext::MakeMock(&mockOptions, ctxOptions);
        if (!mockContext) {
            ERRORF(reporter, "could not create mock context");
            return;
        }
        if (!mockContext->unique()) {
            ERRORF(reporter, "mock context is not unique");
            return;
        }

        CCPRPathDrawer ccpr(std::exchange(mockContext, nullptr), reporter, doStroke);
        if (!ccpr.valid()) {
            return;
        }

        fPath.moveTo(0, 0);
        fPath.cubicTo(50, 50, 0, 50, 50, 0);
        this->onRun(reporter, ccpr);
    }

    virtual ~CCPRTest() {}

protected:
    virtual void customizeOptions(GrMockOptions*, GrContextOptions*) {}
    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) = 0;

    SkPath fPath;
};

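// Instantiates a CCPRTest subclass as a GPU test, running it in each supported combination of
// coverage counting and stroking.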
#define DEF_CCPR_TEST(name) \
    DEF_GPUTEST(name, reporter, /* options */) { \
        name test; \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kNo); \
        test.run(reporter, DoCoverageCount::kYes, DoStroke::kYes); \
        test.run(reporter, DoCoverageCount::kNo, DoStroke::kNo); \
        /* FIXME: test.run(reporter, DoCoverageCount::kNo, DoStroke::kYes) once supported. */ \
    }

class CCPR_cleanup : public CCPRTest {
protected:
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure clip paths get unreffed.
        for (int i = 0; i < 10; ++i) {
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure paths get unreffed when we delete the context without flushing.
        for (int i = 0; i < 10; ++i) {
            ccpr.drawPath(fPath);
            ccpr.clipFullscreenRect(fPath);
        }
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));

        ccpr.destroyGrContext();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_cleanup)

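// Same as CCPR_cleanup, but with texture allocations failing.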
class CCPR_cleanupWithTexAllocFail : public CCPR_cleanup {
    void customizeOptions(GrMockOptions* mockOptions, GrContextOptions*) override {
        mockOptions->fFailTextureAllocations = true;
    }
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        ((GrRecordingContext*)ccpr.dContext())->priv().incrSuppressWarningMessages();
        this->CCPR_cleanup::onRun(reporter, ccpr);
    }
};
DEF_CCPR_TEST(CCPR_cleanupWithTexAllocFail)

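// Verifies that draw ops unregister themselves from CCPR when they are culled early (e.g. by a
// fullscreen clear).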
class CCPR_unregisterCulledOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Ensure Ops get unregistered from CCPR when culled early.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR Op.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush(); // Should not crash (DrawPathsOp should have unregistered itself).

        // Ensure Op unregisters work when we delete the context without flushing.
        ccpr.drawPath(fPath);
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.clear(); // Clear should delete the CCPR DrawPathsOp.
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        ccpr.destroyGrContext(); // Should not crash (DrawPathsOp should have unregistered itself).
    }
};
DEF_CCPR_TEST(CCPR_unregisterCulledOps)

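// Verifies CCPR handles empty paths, and paths that become empty after cropping, without
// crashing.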
class CCPR_parseEmptyPath : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));

        // Make a path large enough that ccpr chooses to crop it by the RT bounds, and ends up with
        // an empty path.
        SkPath largeOutsidePath = SkPath::Polygon({
            {-1e30f, -1e30f},
            {-1e30f, +1e30f},
            {-1e10f, +1e30f},
        }, false);
        ccpr.drawPath(largeOutsidePath);

        // Normally an empty path is culled before reaching ccpr, however we use a back door for
        // testing so this path will make it.
        SkPath emptyPath;
        SkASSERT(emptyPath.isEmpty());
        ccpr.drawPath(emptyPath);

        // This is the test. It will exercise various internal asserts and verify we do not crash.
        ccpr.flush();

        // Now try again with clips.
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();

        // ... and both.
        ccpr.drawPath(largeOutsidePath);
        ccpr.clipFullscreenRect(largeOutsidePath);
        ccpr.drawPath(emptyPath);
        ccpr.clipFullscreenRect(emptyPath);
        ccpr.flush();
    }
};
DEF_CCPR_TEST(CCPR_parseEmptyPath)

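// Returns the mock backend texture ID for 'texture', or 0 if the backing texture is invalid.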
static int get_mock_texture_id(const GrTexture* texture) {
    const GrBackendTexture& backingTexture = texture->getBackendTexture();
    SkASSERT(GrBackendApi::kMock == backingTexture.backend());

    if (!backingTexture.isValid()) {
        return 0;
    }

    GrMockTextureInfo info;
    backingTexture.getMockTextureInfo(&info);
    return info.id();
}

// Base class for cache path unit tests.
class CCPRCacheTest : public CCPRTest {
protected:
    // Registers as an onFlush callback in order to snag the CCPR per-flush resources and note the
    // texture IDs.
    class RecordLastMockAtlasIDs : public GrOnFlushCallbackObject {
    public:
        RecordLastMockAtlasIDs(sk_sp<GrCoverageCountingPathRenderer> ccpr) : fCCPR(ccpr) {}

        int lastCopyAtlasID() const { return fLastCopyAtlasID; }
        int lastRenderedAtlasID() const { return fLastRenderedAtlasID; }

        void preFlush(GrOnFlushResourceProvider*, SkSpan<const uint32_t>) override {
            fLastRenderedAtlasID = fLastCopyAtlasID = 0;

            const GrCCPerFlushResources* resources = fCCPR->testingOnly_getCurrentFlushResources();
            if (!resources) {
                return;
            }

            if (const GrTexture* tex = resources->testingOnly_frontCopyAtlasTexture()) {
                fLastCopyAtlasID = get_mock_texture_id(tex);
            }
            if (const GrTexture* tex = resources->testingOnly_frontRenderedAtlasTexture()) {
                fLastRenderedAtlasID = get_mock_texture_id(tex);
            }
        }

        void postFlush(GrDeferredUploadToken, SkSpan<const uint32_t>) override {}

    private:
        sk_sp<GrCoverageCountingPathRenderer> fCCPR;
        int fLastCopyAtlasID = 0;
        int fLastRenderedAtlasID = 0;
    };

    CCPRCacheTest() {
        static constexpr int primes[11] = {2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31};

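        // Build each test path as a star with more verbs than GrStyledShape's key-from-data
        // limit, varying the point count and step so every path is distinct.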
        SkRandom rand;
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            int numPts = rand.nextRangeU(GrStyledShape::kMaxKeyFromDataVerbCnt + 1,
                                         GrStyledShape::kMaxKeyFromDataVerbCnt * 2);
            int step;
            do {
                step = primes[rand.nextU() % SK_ARRAY_COUNT(primes)];
            } while (step == numPts);
            fPaths[i] = ToolUtils::make_star(SkRect::MakeLTRB(0, 0, 1, 1), numPts, step);
        }
    }

    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix& m) {
        this->drawPathsAndFlush(ccpr, &m, 1);
    }
    void drawPathsAndFlush(CCPRPathDrawer& ccpr, const SkMatrix* matrices, int numMatrices) {
        // Draw all the paths.
        for (size_t i = 0; i < SK_ARRAY_COUNT(fPaths); ++i) {
            ccpr.drawPath(fPaths[i], matrices[i % numMatrices]);
        }
        // Re-draw a few paths, to test the case where a cache entry is hit more than once in a
        // single flush.
        SkRandom rand;
        int duplicateIndices[10];
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            duplicateIndices[i] = rand.nextULessThan(SK_ARRAY_COUNT(fPaths));
        }
        for (size_t i = 0; i < SK_ARRAY_COUNT(duplicateIndices); ++i) {
            for (size_t j = 0; j <= i; ++j) {
                int idx = duplicateIndices[j];
                ccpr.drawPath(fPaths[idx], matrices[idx % numMatrices]);
            }
        }
        ccpr.flush();
    }

private:
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) final {
        RecordLastMockAtlasIDs atlasIDRecorder(sk_ref_sp(ccpr.ccpr()));
        ccpr.dContext()->priv().addOnFlushCallbackObject(&atlasIDRecorder);

        this->onRun(reporter, ccpr, atlasIDRecorder);

        ccpr.dContext()->priv().testingOnly_flushAndRemoveOnFlushCallbackObject(&atlasIDRecorder);
    }

    virtual void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
                       const RecordLastMockAtlasIDs&) = 0;

protected:
    SkPath fPaths[350];
};

// Ensures ccpr always reuses the same atlas texture in the animation use case.
class CCPR_cache_animationAtlasReuse : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5, -.5);
        this->drawPathsAndFlush(ccpr, m);

        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        const int atlasID = atlasIDRecorder.lastRenderedAtlasID();

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 12; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas. This makes sure that on the subsequent draw, after an atlas has been cached
            // and is then invalidated since the matrix will change, that the same underlying
            // texture object is still reused for the next atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, atlasIDRecorder.lastRenderedAtlasID() == atlasID);
            }
        }

        // Do the last draw again. (On draw 3 they should get copied to an 8-bit atlas.)
        this->drawPathsAndFlush(ccpr, m);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        // Now double-check that everything continues to hit the cache as expected when the matrix
        // doesn't change.
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_animationAtlasReuse)

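// Ensures the path cache recycles the same entry objects when the matrix changes between flushes.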
class CCPR_cache_recycleEntries : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Translate(kCanvasSize/2, kCanvasSize/2);
        m.preScale(80, 80);
        m.preTranslate(-.5, -.5);

        auto cache = ccpr.ccpr()->testingOnly_getPathCache();
        REPORTER_ASSERT(reporter, cache);

        const auto& lru = cache->testingOnly_getLRU();

        SkTArray<const void*> expectedPtrs;

        // Ensures we always reuse the same atlas texture in the animation use case.
        for (int i = 0; i < 5; ++i) {
            // 59 is prime, so we will hit every integer modulo 360 before repeating.
            m.preRotate(59, .5, .5);

            // Go twice. Paths have to get drawn twice with the same matrix before we cache their
            // atlas.
            for (int j = 0; j < 2; ++j) {
                this->drawPathsAndFlush(ccpr, m);
                // Nothing should be copied to an 8-bit atlas after just two draws.
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }

            int idx = 0;
            for (const GrCCPathCacheEntry* entry : lru) {
                if (0 == i) {
                    expectedPtrs.push_back(entry);
                } else {
                    // The same pointer should have been recycled for the new matrix.
                    REPORTER_ASSERT(reporter, entry == expectedPtrs[idx]);
                }
                ++idx;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_recycleEntries)

// Ensures mostly-visible paths get their full mask cached.
class CCPR_cache_mostlyVisible : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[3] = {
            SkMatrix::Scale(kCanvasSize/2, kCanvasSize/2),            // Fully visible.
            SkMatrix::Scale(kCanvasSize * 1.25, kCanvasSize * 1.25),  // Mostly visible.
            SkMatrix::Scale(kCanvasSize * 1.5, kCanvasSize * 1.5),    // Mostly NOT visible.
        };

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 3);
            if (2 == i) {
                // The mostly-visible paths should still get cached.
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            // Ensure mostly NOT-visible paths never get cached.
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // Clear the path cache.
        this->drawPathsAndFlush(ccpr, SkMatrix::I());

        // Now only draw the fully/mostly visible ones.
        for (int i = 0; i < 2; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
        }

        // On draw 3 they should get copied to an 8-bit atlas.
        this->drawPathsAndFlush(ccpr, matrices, 2);
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }

        // Draw a different part of the path to ensure the full mask was cached.
        matrices[1].postTranslate(SkScalarFloorToInt(kCanvasSize * -.25f),
                                  SkScalarFloorToInt(kCanvasSize * -.25f));
        for (int i = 0; i < 10; ++i) {
            this->drawPathsAndFlush(ccpr, matrices, 2);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_mostlyVisible)

// Ensures GrDirectContext::performDeferredCleanup works.
class CCPR_cache_deferredCleanup : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix m = SkMatrix::Scale(20, 20);
        int lastRenderedAtlasID = 0;

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            int renderedAtlasID = atlasIDRecorder.lastRenderedAtlasID();
            REPORTER_ASSERT(reporter, renderedAtlasID != lastRenderedAtlasID);
            lastRenderedAtlasID = renderedAtlasID;

            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, lastRenderedAtlasID == atlasIDRecorder.lastRenderedAtlasID());

            // On draw 3 they should get copied to an 8-bit atlas.
            this->drawPathsAndFlush(ccpr, m);
            REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

            for (int i = 0; i < 10; ++i) {
                this->drawPathsAndFlush(ccpr, m);
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            ccpr.dContext()->performDeferredCleanup(std::chrono::milliseconds(0));
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_deferredCleanup)

// Verifies the cache/hash table internals.
class CCPR_cache_hashTable : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        using CoverageType = GrCCAtlas::CoverageType;
        SkMatrix m = SkMatrix::Scale(20, 20);

        for (int i = 0; i < 5; ++i) {
            this->drawPathsAndFlush(ccpr, m);
            if (2 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
            }
            if (i < 2) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }

            auto cache = ccpr.ccpr()->testingOnly_getPathCache();
            REPORTER_ASSERT(reporter, cache);

            const auto& hash = cache->testingOnly_getHashTable();
            const auto& lru = cache->testingOnly_getLRU();
            int count = 0;
            for (GrCCPathCacheEntry* entry : lru) {
                auto* node = hash.find(entry->cacheKey());
                REPORTER_ASSERT(reporter, node);
                REPORTER_ASSERT(reporter, node->entry() == entry);
                REPORTER_ASSERT(reporter, 0 == entry->testingOnly_peekOnFlushRefCnt());
                REPORTER_ASSERT(reporter, entry->unique());
                if (0 == i) {
                    REPORTER_ASSERT(reporter, !entry->cachedAtlas());
                } else {
                    const GrCCCachedAtlas* cachedAtlas = entry->cachedAtlas();
                    REPORTER_ASSERT(reporter, cachedAtlas);
                    if (1 == i) {
                        REPORTER_ASSERT(reporter, ccpr.ccpr()->coverageType()
                                                          == cachedAtlas->coverageType());
                    } else {
                        REPORTER_ASSERT(reporter, CoverageType::kA8_LiteralCoverage
                                                          == cachedAtlas->coverageType());
                    }
                    REPORTER_ASSERT(reporter, cachedAtlas->textureKey().isValid());
                    // The actual proxy should not be held past the end of a flush.
                    REPORTER_ASSERT(reporter, !cachedAtlas->getOnFlushProxy());
                    REPORTER_ASSERT(reporter, 0 == cachedAtlas->testingOnly_peekOnFlushRefCnt());
                }
                ++count;
            }
            REPORTER_ASSERT(reporter, hash.count() == count);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_hashTable)

// Ensures paths get cached even when using a sporadic flushing pattern and drawing out of order
// (a la Chrome tiles).
class CCPR_cache_multiFlush : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        static constexpr int kNumPaths = SK_ARRAY_COUNT(fPaths);
        static constexpr int kBigPrimes[] = {
                9323, 11059, 22993, 38749, 45127, 53147, 64853, 77969, 83269, 99989};

        SkRandom rand;
        SkMatrix m = SkMatrix::I();

        for (size_t i = 0; i < SK_ARRAY_COUNT(kBigPrimes); ++i) {
            int prime = kBigPrimes[i];
            int endPathIdx = (int)rand.nextULessThan(kNumPaths);
            int pathIdx = endPathIdx;
            int nextFlush = rand.nextRangeU(1, 47);
            for (int j = 0; j < kNumPaths; ++j) {
                pathIdx = (pathIdx + prime) % kNumPaths;
                int repeat = rand.nextRangeU(1, 3);
                for (int k = 0; k < repeat; ++k) {
                    ccpr.drawPath(fPaths[pathIdx], m);
                }
                if (nextFlush == j) {
                    ccpr.flush();
                    // The paths are small enough that we should never copy to an A8 atlas.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    if (i < 2) {
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
                    }
                    nextFlush = std::min(j + (int)rand.nextRangeU(1, 29), kNumPaths - 1);
                }
            }
            SkASSERT(endPathIdx == pathIdx % kNumPaths);
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_multiFlush)

// Ensures a path drawn over multiple tiles gets cached.
class CCPR_cache_multiTileCache : public CCPRCacheTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        // Make sure a path drawn over 9 tiles gets cached (1 tile out of 9 is >10% visibility).
        const SkMatrix m0 = SkMatrix::Scale(kCanvasSize*3, kCanvasSize*3);
        const SkPath p0 = fPaths[0];
        for (int i = 0; i < 9; ++i) {
            static constexpr int kRowOrder[9] = {0,1,1,0,2,2,2,1,0};
            static constexpr int kColumnOrder[9] = {0,0,1,1,0,1,2,2,2};

            SkMatrix tileM = m0;
            tileM.postTranslate(-kCanvasSize * kColumnOrder[i], -kCanvasSize * kRowOrder[i]);
            ccpr.drawPath(p0, tileM);
            ccpr.flush();
            if (i < 5) {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            } else if (5 == i) {
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            } else {
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Now make sure paths don't get cached when visibility is <10% for every draw (12 tiles).
        const SkMatrix m1 = SkMatrix::Scale(kCanvasSize*4, kCanvasSize*3);
        const SkPath p1 = fPaths[1];
        for (int row = 0; row < 3; ++row) {
            for (int col = 0; col < 4; ++col) {
                SkMatrix tileM = m1;
                tileM.postTranslate(-kCanvasSize * col, -kCanvasSize * row);
                ccpr.drawPath(p1, tileM);
                ccpr.flush();
                REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
            }
        }

        // Double-check the cache is still intact.
        ccpr.drawPath(p0, m0);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

        ccpr.drawPath(p1, m1);
        ccpr.flush();
        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastRenderedAtlasID());
    }
};
DEF_CCPR_TEST(CCPR_cache_multiTileCache)

// This test exercises CCPR's cache capabilities by drawing many paths with two different
// transformation matrices. We then vary the matrices independently by whole and partial pixels,
// and verify the caching behaved as expected.
class CCPR_cache_partialInvalidate : public CCPRCacheTest {
    void customizeOptions(GrMockOptions*, GrContextOptions* ctxOptions) override {
        ctxOptions->fAllowPathMaskCaching = true;
    }

    static constexpr int kPathSize = 4;

    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr,
               const RecordLastMockAtlasIDs& atlasIDRecorder) override {
        SkMatrix matrices[2] = {
            SkMatrix::Translate(5, 5),
            SkMatrix::Translate(kCanvasSize - kPathSize - 5, kCanvasSize - kPathSize - 5)
        };
        matrices[0].preScale(kPathSize, kPathSize);
        matrices[1].preScale(kPathSize, kPathSize);

        int firstAtlasID = 0;

        for (int iterIdx = 0; iterIdx < 4*3*2; ++iterIdx) {
            this->drawPathsAndFlush(ccpr, matrices, 2);

            if (0 == iterIdx) {
                // First iteration: just note the ID of the stashed atlas and continue.
                firstAtlasID = atlasIDRecorder.lastRenderedAtlasID();
                REPORTER_ASSERT(reporter, 0 != firstAtlasID);
                continue;
            }

            int testIdx = (iterIdx/2) % 3;
            int repetitionIdx = iterIdx % 2;
            switch (testIdx) {
                case 0:
                    if (0 == repetitionIdx) {
                        // This is the big test. New paths were drawn twice last round. On hit 2
                        // (last time), 'firstAtlasID' was cached as a 16-bit atlas. Now, on hit 3,
                        // these paths should be copied out of 'firstAtlasID', and into an A8 atlas.
                        // THEN: we should recycle 'firstAtlasID' and reuse that same texture to
                        // render the new masks.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                        // This is hit 2 for the new masks. Next time they will be copied to an A8
                        // atlas.
                        REPORTER_ASSERT(reporter,
                                        atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);
                    }

                    if (1 == repetitionIdx) {
                        // Integer translates: all path masks stay valid.
                        matrices[0].preTranslate(-1, -1);
                        matrices[1].preTranslate(1, 1);
                    }
                    break;

                case 1:
                    if (0 == repetitionIdx) {
                        // New paths were drawn twice last round. The third hit (now) they should be
                        // copied to an A8 atlas.
                        REPORTER_ASSERT(reporter, 0 != atlasIDRecorder.lastCopyAtlasID());
                    } else {
                        REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());
                    }

                    // This draw should have gotten 100% cache hits; we only did integer translates
                    // last time (or none if it was the first flush). Therefore, everything should
                    // have been cached.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastRenderedAtlasID());

                    if (1 == repetitionIdx) {
                        // Invalidate even path masks.
                        matrices[0].preTranslate(1.6f, 1.4f);
                    }
                    break;

                case 2:
                    // No new masks to copy from last time; it had 100% cache hits.
                    REPORTER_ASSERT(reporter, 0 == atlasIDRecorder.lastCopyAtlasID());

                    // Even path masks were invalidated last iteration by a subpixel translate.
                    // They should have been re-rendered this time in the original 'firstAtlasID'
                    // texture.
                    REPORTER_ASSERT(reporter,
                                    atlasIDRecorder.lastRenderedAtlasID() == firstAtlasID);

                    if (1 == repetitionIdx) {
                        // Invalidate odd path masks.
                        matrices[1].preTranslate(-1.4f, -1.6f);
                    }
                    break;
            }
        }
    }
};
DEF_CCPR_TEST(CCPR_cache_partialInvalidate)

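// Verifies that the pending-paths map can be dropped before its ops execute, without leaking
// paths or crashing at flush time.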
class CCPR_unrefPerOpsTaskPathsBeforeOps : public CCPRTest {
    void onRun(skiatest::Reporter* reporter, CCPRPathDrawer& ccpr) override {
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
        for (int i = 0; i < 10000; ++i) {
            // Draw enough paths to make the arena allocator hit the heap.
            ccpr.drawPath(fPath);
        }

        // Unref the GrCCPerOpsTaskPaths object.
        auto perOpsTaskPathsMap = ccpr.ccpr()->detachPendingPaths();
        perOpsTaskPathsMap.clear();

        // Now delete the Op and all its draws.
        REPORTER_ASSERT(reporter, !SkPathPriv::TestingOnly_unique(fPath));
        ccpr.flush();
        REPORTER_ASSERT(reporter, SkPathPriv::TestingOnly_unique(fPath));
    }
};
DEF_CCPR_TEST(CCPR_unrefPerOpsTaskPathsBeforeOps)

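// Base class for CCPR tests that run against real rendering contexts instead of the mock context.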
class CCPRRenderingTest {
public:
    void run(skiatest::Reporter* reporter, GrDirectContext* dContext, DoStroke doStroke) const {
        if (auto ccpr = dContext->priv().drawingManager()->getCoverageCountingPathRenderer()) {
            if (DoStroke::kYes == doStroke &&
                GrCCAtlas::CoverageType::kA8_Multisample == ccpr->coverageType()) {
                return; // Stroking is not yet supported for multisample.
            }
            CCPRPathDrawer drawer(sk_ref_sp(dContext), reporter, doStroke);
            if (!drawer.valid()) {
                return;
            }
            this->onRun(reporter, drawer);
        }
    }

    virtual ~CCPRRenderingTest() {}

protected:
    virtual void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const = 0;
};

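// Instantiates a CCPRRenderingTest on every rendering context, with and without stroking.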
#define DEF_CCPR_RENDERING_TEST(name) \
    DEF_GPUTEST_FOR_RENDERING_CONTEXTS(name, reporter, ctxInfo) { \
        name test; \
        test.run(reporter, ctxInfo.directContext(), DoStroke::kNo); \
        test.run(reporter, ctxInfo.directContext(), DoStroke::kYes); \
    }

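// Stresses instance buffer limits by drawing a single path with ~2^17 line verbs.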
class CCPR_busyPath : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr int kNumBusyVerbs = 1 << 17;
        ccpr.clear();
        SkPathBuilder busyPath;
        busyPath.moveTo(0, 0); // top left
        busyPath.lineTo(kCanvasSize, kCanvasSize); // bottom right
        for (int i = 2; i < kNumBusyVerbs; ++i) {
            float offset = i * ((float)kCanvasSize / kNumBusyVerbs);
            busyPath.lineTo(kCanvasSize - offset, kCanvasSize + offset); // offscreen
        }
        ccpr.drawPath(busyPath.detach());

        ccpr.flush(); // If this doesn't crash, the test passed.
                      // If it does, maybe fiddle with fMaxInstancesPerDrawArraysWithoutCrashing in
                      // your platform's GrGLCaps.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_busyPath)

// https://bugs.chromium.org/p/chromium/issues/detail?id=1102117
class CCPR_evictCacheEntryForPendingDrawOp : public CCPRRenderingTest {
    void onRun(skiatest::Reporter* reporter, const CCPRPathDrawer& ccpr) const override {
        static constexpr SkRect kRect = SkRect::MakeWH(50, 50);
        ccpr.clear();

        // Make sure the path is cached.
        for (int i = 0; i < 2; i++) {
            SkPath path;
            path.addRect(kRect);

            ccpr.drawPath(path);
            ccpr.flush();
        }

        // Make enough cached draws for DoCopies to happen.
        for (int i = 0; i <= GrCoverageCountingPathRenderer::kDoCopiesThreshold; i++) {
            SkPath path;
            path.addRect(kRect);
            ccpr.drawPath(path);
        }

        // Now draw the path with an incompatible matrix. The previous draw's cached atlas should
        // not be invalidated; otherwise, this flush would render more paths than allocated for.
        auto m = SkMatrix::Translate(0.1f, 0.1f);
        SkPath path;
        path.addRect(kRect);
        ccpr.drawPath(path, m);
        ccpr.flush();

        // If this test does not crash, it passed.
    }
};
DEF_CCPR_RENDERING_TEST(CCPR_evictCacheEntryForPendingDrawOp)