/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/private/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/GrVx.h"
#include "src/gpu/effects/GrBlendFragmentProcessor.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/geometry/GrWangsFormula.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
#include "src/gpu/tessellate/GrPathStencilCoverOp.h"
#include "src/gpu/tessellate/GrPathTessellateOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
#include "src/gpu/tessellate/shaders/GrModulateAtlasCoverageFP.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return !caps.avoidStencilBuffers() &&
           caps.drawInstancedSupport() &&
           caps.shaderCaps()->vertexIDSupport() &&
           !caps.disableTessellationPathRenderer();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
        fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;  // Enable the atlas.
    }
}

GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport(
        const GrStyledShape& shape) const {
    if (!shape.style().isSimpleFill()) {
        // Don't bother with stroke stencilling yet. Skia probably shouldn't support this at all
        // since you can't clip by a stroke.
        return kNoSupport_StencilSupport;
    }
    return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport;
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (args.fAAType == GrAAType::kCoverage ||
        shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() ||
        shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
        shape.inverseFilled() ||
        !args.fProxy->canUseStencil(*args.fCaps)) {
        return CanDrawPath::kNo;
    }
    if (args.fHasUserStencilSettings) {
        // Non-convex paths and strokes use the stencil buffer internally, so they can't support
        // draws with stencil settings.
        if (!shape.style().isSimpleFill() || !shape.knownToBeConvex()) {
            return CanDrawPath::kNo;
        }
    }
    return CanDrawPath::kYes;
}

static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext,
                                           GrTessellationPathRenderer::PathFlags pathFlags,
                                           GrAAType aaType, const SkRect& pathDevBounds,
                                           const SkMatrix& viewMatrix, const SkPath& path,
                                           GrPaint&& paint) {
    SkASSERT(!path.isConvex());
    int numVerbs = path.countVerbs();
    if (numVerbs > 0) {
        // Check if the path is large and/or simple enough that we can triangulate the inner fan
        // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
        // and then fill the inner fan directly to the final render target, thus drawing the
        // majority of pixels in a single render pass.
        float gpuFragmentWork = pathDevBounds.height() * pathDevBounds.width();
        float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs);  // N log N.
        constexpr static float kCpuWeight = 512;
        constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
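        // E.g., a 100-verb path costs cpuTessellationWork = 100 * SkNextLog2(100) = 700, so CPU
        // triangulation only wins once the path's device bounds cover more than
        // 700 * 512 + 256^2, i.e. roughly 424k pixels.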
        if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
            return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
                                                        std::move(paint), aaType, pathFlags,
                                                        pathDevBounds);
        }
    }
    return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType,
                                            pathFlags, pathDevBounds);
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;

    SkPath path;
    args.fShape->asPath(&path);

    // Handle strokes first.
    if (!args.fShape->style().isSimpleFill()) {
        SkASSERT(args.fUserStencilSettings->isUnused());
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
                                                   path, stroke, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (args.fUserStencilSettings->isUnused() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path,
                                pathDevBounds, args.fAAType != GrAAType::kNone, &devIBounds,
                                &locationInAtlas, &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext, surfaceDrawContext->numSamples(),
                                                sk_ref_sp(fAtlas.textureProxy()), devIBounds,
                                                locationInAtlas, transposedInAtlas,
                                                *args.fViewMatrix, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
    // an effort to reduce DMSAA triggers.
    if (args.fShape->knownToBeConvex()) {
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(args.fPaint), args.fAAType,
                                                 args.fUserStencilSettings, pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkASSERT(args.fUserStencilSettings->isUnused());  // See onGetStencilSupport().
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, pathDevBounds,
                                      *args.fViewMatrix, path, std::move(args.fPaint));
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkASSERT(args.fShape->style().isSimpleFill());  // See onGetStencilSupport().

    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    SkPath path;
    args.fShape->asPath(&path);

    if (args.fShape->knownToBeConvex()) {
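        // kMarkStencil writes 0x0001 to every sample covered by the convex path. Color output is
        // disabled below via GrDisableColorXPFactory, so this op only touches the stencil buffer.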
        constexpr static GrUserStencilSettings kMarkStencil(
            GrUserStencilSettings::StaticInit<
                0x0001,
                GrUserStencilTest::kAlways,
                0xffff,
                GrUserStencilOp::kReplace,
                GrUserStencilOp::kKeep,
                0xffff>());

        GrPaint stencilPaint;
        stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(stencilPaint), aaType, &kMarkStencil,
                                                 pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return;
    }

    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
                                      *args.fViewMatrix, path, GrPaint());
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
}

GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(const SkIRect& drawBounds,
                                                       const SkMatrix& viewMatrix,
                                                       const SkPath& path, GrAA aa,
                                                       std::unique_ptr<GrFragmentProcessor> inputFP,
                                                       const GrCaps& caps) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
    if (!this->tryAddPathToAtlas(caps, viewMatrix, path, viewMatrix.mapRect(path.getBounds()),
                                 aa != GrAA::kNo, &devIBounds, &locationInAtlas,
                                 &transposedInAtlas)) {
        // The path is too big, or the atlas ran out of room.
        return GrFPFailure(std::move(inputFP));
    }
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
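    // Build a matrix that maps device-space coordinates to the path's texels in the atlas. The
    // transposed branch also swaps x and y, matching how the path was packed into the atlas.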
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageFP::Flags::kNone;
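    // The atlas only stores the forward (non-inverse) coverage, so have the FP invert the sampled
    // coverage when the clip path is inverse filled.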
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageFP::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageFP::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageFP>(flags, std::move(inputFP),
                                                                   fAtlas.surfaceProxyView(caps),
                                                                   atlasMatrix, devIBounds));
}

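// The atlas path cache key identifies a path instance by its affine view matrix, its subpixel
// translation (quantized to the tessellator's linearization precision), its AA mode, its fill
// rule, and the path's generation ID. Draws that match on all of these can share one atlas entry.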
void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias,
                                                   const SkPath& path) {
    using grvx::float2;
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getSkewY();
    fAffineMatrix[3] = m.getScaleY();
    float2 translate = {m.getTranslateX(), m.getTranslateY()};
    float2 subpixelPosition = translate - skvx::floor(translate);
    float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
                                             GrPathTessellator::kLinearizationPrecision);
    skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
    fAntialias = antialias;
    fFillRule = (uint8_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
    fPathGenID = path.getGenerationID();
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(const GrCaps& caps, const SkMatrix& viewMatrix,
                                                   const SkPath& path, const SkRect& pathDevBounds,
                                                   bool antialias, SkIRect* devIBounds,
                                                   SkIPoint16* locationInAtlas,
                                                   bool* transposedInAtlas) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    if (!fMaxAtlasPathWidth) {
        return false;
    }

    if (!caps.multisampleDisableSupport() && !antialias) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends very well to efficient
    // pow2 atlas packing.
    pathDevBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we transpose paths so that height is
    // always the smaller dimension, limiting area to kMaxAtlasPathHeight^2 pixels guarantees
    // height <= kMaxAtlasPathHeight, while also allowing paths that are very wide and short.
    if ((uint64_t)maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, antialias, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }

    SkMatrix atlasMatrix = viewMatrix;
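    // If the path was transposed in the atlas, swap the matrix's rows so mapped points come out as
    // (y, x), then rebuild the translations so the path lands at locationInAtlas.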
    if (*transposedInAtlas) {
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), antialias);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
    fAtlasPathCache.reset();
}

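// Pass wherever the stencil value is nonzero; leave the stencil buffer unchanged.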
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

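// Pass wherever the stencil value is nonzero, and reset it back to zero afterward.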
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    SkRect atlasRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathStencilCoverOp>(onFlushRP->recordingContext(), SkMatrix::I(),
                                                       *uberPath, GrPaint(), aaType,
                                                       PathFlags::kStencilOnly, atlasRect);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);
    const GrUserStencilSettings* stencil;
    if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
        // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
        // stencil values anyway, there is no need to reset the stencil values back to 0.
        stencil = &kTestStencil;
    } else {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        atlasRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    }
    rtc->stencilRect(nullptr, stencil, std::move(paint), GrAA::kYes, SkMatrix::I(), atlasRect);

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}