blob: 9854ff0f8202df1e054946506bb820570c842113 [file] [log] [blame]
Chris Daltonb832ce62020-01-06 19:49:37 -07001/*
2 * Copyright 2019 Google LLC.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Chris Dalton0a22b1e2020-03-26 11:52:15 -06008#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
Chris Daltonb832ce62020-01-06 19:49:37 -07009
Chris Dalton50c3c242021-06-14 16:32:35 -060010#include "include/private/SkVx.h"
Chris Daltond2dc8dd2020-05-19 16:32:02 -060011#include "src/core/SkIPoint16.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070012#include "src/core/SkPathPriv.h"
13#include "src/gpu/GrClip.h"
14#include "src/gpu/GrMemoryPool.h"
15#include "src/gpu/GrRecordingContextPriv.h"
Brian Salomoneebe7352020-12-09 16:37:04 -050016#include "src/gpu/GrSurfaceDrawContext.h"
Chris Dalton50c3c242021-06-14 16:32:35 -060017#include "src/gpu/GrVx.h"
Chris Dalton43a8b0c2021-06-14 17:10:07 -060018#include "src/gpu/effects/GrBlendFragmentProcessor.h"
19#include "src/gpu/effects/generated/GrDeviceSpaceEffect.h"
Michael Ludwig2686d692020-04-17 20:21:37 +000020#include "src/gpu/geometry/GrStyledShape.h"
Michael Ludwig4e9d5e22021-05-11 10:00:12 -040021#include "src/gpu/geometry/GrWangsFormula.h"
Chris Daltonc3b67eb2020-02-10 21:09:58 -070022#include "src/gpu/ops/GrFillRectOp.h"
Chris Dalton4e998532020-02-10 11:06:42 -070023#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
Chris Daltonebb37e72021-01-27 17:59:45 -070024#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
Chris Dalton031d76b2021-06-08 16:32:00 -060025#include "src/gpu/tessellate/GrPathStencilCoverOp.h"
Chris Dalton7ae272f2021-06-10 11:45:14 -060026#include "src/gpu/tessellate/GrPathTessellateOp.h"
Chris Dalton05007df2021-02-04 00:24:52 -070027#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070028
// Initial dimensions of the path atlas; it grows as needed up to kMaxAtlasSize (also clamped by
// the caps' preferred render target size in the constructor).
constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

// The atlas stores plain alpha8 coverage, even when the final target is MSAA.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;
41
Chris Dalton1413d112020-07-09 11:26:31 -060042bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
Chris Dalton8f282f52021-01-06 11:47:58 -070043 return !caps.avoidStencilBuffers() &&
44 caps.drawInstancedSupport() &&
Chris Daltoneae5c162020-12-29 10:18:21 -070045 caps.shaderCaps()->vertexIDSupport() &&
46 !caps.disableTessellationPathRenderer();
Chris Dalton1413d112020-07-09 11:26:31 -060047}
48
Chris Dalton9213e612020-10-09 17:22:43 -060049GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
Chris Daltond72cb4c2020-07-16 17:50:17 -060050 : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
Chris Dalton31634282020-09-17 12:16:54 -060051 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
52 *rContext->priv().caps(), kAtlasAlgorithm) {
Chris Dalton31634282020-09-17 12:16:54 -060053 const GrCaps& caps = *rContext->priv().caps();
Chris Dalton9213e612020-10-09 17:22:43 -060054 auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
Chris Dalton569c01b2021-05-25 10:11:46 -060055 if (rContext->asDirectContext() && // The atlas doesn't support DDL yet.
56 caps.internalMultisampleCount(atlasFormat) > 1) {
57 fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2; // Enable the atlas.
Chris Dalton9213e612020-10-09 17:22:43 -060058 }
Chris Dalton4e998532020-02-10 11:06:42 -070059}
60
Chris Dalton7ae272f2021-06-10 11:45:14 -060061GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport(
62 const GrStyledShape& shape) const {
63 if (!shape.style().isSimpleFill()) {
64 // Don't bother with stroke stencilling yet. Skia probably shouldn't support this at all
65 // since you can't clip by a stroke.
66 return kNoSupport_StencilSupport;
67 }
68 return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport;
69}
70
Chris Dalton0a22b1e2020-03-26 11:52:15 -060071GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
Chris Daltonb832ce62020-01-06 19:49:37 -070072 const CanDrawPathArgs& args) const {
Chris Dalton1c62a7b2020-06-29 22:01:14 -060073 const GrStyledShape& shape = *args.fShape;
Chris Dalton57ab06c2021-04-22 12:57:28 -060074 if (args.fAAType == GrAAType::kCoverage ||
75 shape.style().hasPathEffect() ||
Chris Dalton06b52ad2020-12-15 10:01:35 -070076 args.fViewMatrix->hasPerspective() ||
77 shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
Chris Dalton2078cbe2020-12-14 19:04:55 -070078 shape.inverseFilled() ||
Chris Dalton537293bf2021-05-03 15:54:24 -060079 !args.fProxy->canUseStencil(*args.fCaps)) {
Chris Daltonb832ce62020-01-06 19:49:37 -070080 return CanDrawPath::kNo;
81 }
Chris Dalton7ae272f2021-06-10 11:45:14 -060082 if (args.fHasUserStencilSettings) {
83 // Non-convex paths and strokes use the stencil buffer internally, so they can't support
84 // draws with stencil settings.
85 if (!shape.style().isSimpleFill() || !shape.knownToBeConvex()) {
86 return CanDrawPath::kNo;
87 }
88 }
Chris Daltonb832ce62020-01-06 19:49:37 -070089 return CanDrawPath::kYes;
90}
91
Chris Dalton7ae272f2021-06-10 11:45:14 -060092static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext,
93 GrTessellationPathRenderer::PathFlags pathFlags,
94 GrAAType aaType, const SkRect& pathDevBounds,
95 const SkMatrix& viewMatrix, const SkPath& path,
96 GrPaint&& paint) {
97 SkASSERT(!path.isConvex());
98 int numVerbs = path.countVerbs();
99 if (numVerbs > 0) {
100 // Check if the path is large and/or simple enough that we can triangulate the inner fan
101 // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
102 // and then fill the inner fan directly to the final render target, thus drawing the
103 // majority of pixels in a single render pass.
104 float gpuFragmentWork = pathDevBounds.height() * pathDevBounds.width();
105 float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs); // N log N.
106 constexpr static float kCpuWeight = 512;
107 constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
108 if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
109 return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
110 std::move(paint), aaType, pathFlags,
111 pathDevBounds);
Chris Dalton70a0d2c2021-01-26 12:01:21 -0700112 }
Chris Daltonc2a17462020-12-09 16:46:22 -0700113 }
Chris Dalton7ae272f2021-06-10 11:45:14 -0600114 return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType,
115 pathFlags, pathDevBounds);
Chris Daltonc2a17462020-12-09 16:46:22 -0700116}
117
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600118bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
John Stiles0fbc6a32021-06-04 14:40:57 -0400119 GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
Chris Daltonb832ce62020-01-06 19:49:37 -0700120
Chris Dalton7ae272f2021-06-10 11:45:14 -0600121 SkPath path;
122 args.fShape->asPath(&path);
123
124 // Handle strokes first.
125 if (!args.fShape->style().isSimpleFill()) {
126 SkASSERT(args.fUserStencilSettings->isUnused());
127 const SkStrokeRec& stroke = args.fShape->style().strokeRec();
128 SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
129 auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
130 path, stroke, std::move(args.fPaint));
131 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
132 return true;
133 }
134
135 SkRect pathDevBounds;
136 args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());
Chris Daltonb96995d2020-06-04 16:44:29 -0600137
Chris Dalton4e998532020-02-10 11:06:42 -0700138 // See if the path is small and simple enough to atlas instead of drawing directly.
139 //
140 // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
141 // render the sample mask to an integer texture, but such a scheme would probably require
142 // GL_EXT_post_depth_coverage, which appears to have low adoption.
143 SkIRect devIBounds;
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600144 SkIPoint16 locationInAtlas;
145 bool transposedInAtlas;
Chris Dalton7ae272f2021-06-10 11:45:14 -0600146 if (args.fUserStencilSettings->isUnused() &&
147 this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path,
Chris Dalton50c3c242021-06-14 16:32:35 -0600148 pathDevBounds, args.fAAType != GrAAType::kNone, &devIBounds,
149 &locationInAtlas, &transposedInAtlas)) {
Chris Dalton9213e612020-10-09 17:22:43 -0600150 // The atlas is not compatible with DDL. We should only be using it on direct contexts.
151 SkASSERT(args.fContext->asDirectContext());
Chris Dalton50c3c242021-06-14 16:32:35 -0600152 auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext, surfaceDrawContext->numSamples(),
153 sk_ref_sp(fAtlas.textureProxy()), devIBounds,
154 locationInAtlas, transposedInAtlas,
155 *args.fViewMatrix, std::move(args.fPaint));
Brian Salomon1aa1f5f2020-12-11 17:25:17 -0500156 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -0700157 return true;
158 }
Chris Daltonb832ce62020-01-06 19:49:37 -0700159
Chris Dalton7ae272f2021-06-10 11:45:14 -0600160 // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
161 // an effort to reduce DMSAA triggers.
162 if (args.fShape->knownToBeConvex()) {
163 auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
164 std::move(args.fPaint), args.fAAType,
165 args.fUserStencilSettings, pathDevBounds);
Chris Daltonb0643342020-12-15 01:04:12 -0700166 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton7ae272f2021-06-10 11:45:14 -0600167 return true;
Chris Daltonb96995d2020-06-04 16:44:29 -0600168 }
Chris Dalton7ae272f2021-06-10 11:45:14 -0600169
170 SkASSERT(args.fUserStencilSettings->isUnused()); // See onGetStencilSupport().
171 auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, pathDevBounds,
172 *args.fViewMatrix, path, std::move(args.fPaint));
173 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -0700174 return true;
175}
176
Chris Dalton7ae272f2021-06-10 11:45:14 -0600177void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
178 SkASSERT(args.fShape->style().isSimpleFill()); // See onGetStencilSupport().
179
180 GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
181 GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
182
183 SkRect pathDevBounds;
184 args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());
185
186 SkPath path;
187 args.fShape->asPath(&path);
188
189 if (args.fShape->knownToBeConvex()) {
190 constexpr static GrUserStencilSettings kMarkStencil(
191 GrUserStencilSettings::StaticInit<
192 0x0001,
193 GrUserStencilTest::kAlways,
194 0xffff,
195 GrUserStencilOp::kReplace,
196 GrUserStencilOp::kKeep,
197 0xffff>());
198
199 GrPaint stencilPaint;
200 stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());
201 auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
202 std::move(stencilPaint), aaType, &kMarkStencil,
203 pathDevBounds);
204 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
205 return;
Chris Daltonb0643342020-12-15 01:04:12 -0700206 }
207
Chris Dalton7ae272f2021-06-10 11:45:14 -0600208 auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
209 *args.fViewMatrix, path, GrPaint());
210 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
211}
212
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600213GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(
214 const SkIRect& drawBounds, const SkMatrix& viewMatrix, const SkPath& path, GrAA aa,
215 std::unique_ptr<GrFragmentProcessor> inputCoverage, const GrCaps& caps) {
216 if (viewMatrix.hasPerspective()) {
217 return GrFPFailure(std::move(inputCoverage));
218 }
219 SkIRect devIBounds;
220 SkIPoint16 locationInAtlas;
221 bool transposedInAtlas;
222 // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
223 if (!this->tryAddPathToAtlas(caps, viewMatrix, path, viewMatrix.mapRect(path.getBounds()),
224 aa != GrAA::kNo, &devIBounds, &locationInAtlas,
225 &transposedInAtlas)) {
226 // The path is too big, or the atlas ran out of room.
227 return GrFPFailure(std::move(inputCoverage));
228 }
229 GrSurfaceProxyView atlasView(sk_ref_sp(fAtlas.textureProxy()), GrDynamicAtlas::kTextureOrigin,
230 caps.getReadSwizzle(fAtlas.textureProxy()->backendFormat(),
231 GrColorType::kAlpha_8));
232 SkMatrix atlasMatrix;
233 SkRect atlasSubset, atlasDomain;
234 auto [atlasX, atlasY] = locationInAtlas;
235 if (!transposedInAtlas) {
236 auto atlasOffset = SkVector::Make(atlasX - devIBounds.left(), atlasY - devIBounds.top());
237 atlasMatrix = SkMatrix::Translate(atlasOffset);
238 atlasSubset = SkRect::Make(devIBounds).makeOffset(atlasOffset);
239 atlasDomain = SkRect::Make(drawBounds).makeOffset(atlasOffset);
240 } else {
241 atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
242 1, 0, atlasY - devIBounds.left(),
243 0, 0, 1);
244 atlasSubset = SkRect::MakeXYWH(atlasX, atlasY, devIBounds.height(), devIBounds.width());
245 atlasDomain = atlasMatrix.mapRect(SkRect::Make(drawBounds));
246 }
247#ifdef SK_DEBUG
248 if (!path.isInverseFillType()) {
249 // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
250 // opposed to us having to enforce the texture subset. Feel free to remove this assert if
251 // that ever changes.
252 SkASSERT(atlasDomain.isEmpty() || atlasSubset.contains(atlasDomain));
253 }
254#endif
255 // Inset the domain because if it is equal to the subset, then it falls on an exact boundary
256 // between pixels, the "nearest" filter becomes undefined, and GrTextureEffect is forced to
257 // manually enforce the subset. This inset is justifiable because textures are sampled at pixel
258 // center, unless sample shading is enabled, in which case we assume standard sample locations
259 // (https://www.khronos.org/registry/vulkan/specs/1.2/html/chap25.html).
260 // NOTE: At MSAA16, standard sample locations begin falling on actual pixel boundaries. If this
261 // happens then we simply have to rely on the fact that the atlas has a 1px padding between
262 // entries.
263 constexpr static float kMinInsetOfStandardMSAA8Locations = 1/16.f;
264 atlasDomain.inset(kMinInsetOfStandardMSAA8Locations, kMinInsetOfStandardMSAA8Locations);
265 // Look up clip coverage in the atlas.
266 GrSamplerState samplerState(GrSamplerState::WrapMode::kClampToBorder,
267 GrSamplerState::Filter::kNearest);
268 auto fp = GrTextureEffect::MakeSubset(std::move(atlasView), kPremul_SkAlphaType, atlasMatrix,
269 samplerState, atlasSubset, atlasDomain, caps);
270 // Feed sk_FragCoord into the above texture lookup.
271 fp = GrDeviceSpaceEffect::Make(std::move(fp));
272 if (path.isInverseFillType()) {
273 // outputCoverage = inputCoverage * (1 - atlasAlpha)
274 fp = GrBlendFragmentProcessor::Make(
275 std::move(fp), std::move(inputCoverage), SkBlendMode::kDstOut,
276 GrBlendFragmentProcessor::BlendBehavior::kSkModeBehavior);
277 } else {
278 // outputCoverage = inputCoverage * atlasAlpha
279 fp = GrBlendFragmentProcessor::Make(
280 std::move(fp), std::move(inputCoverage), SkBlendMode::kDstIn,
281 GrBlendFragmentProcessor::BlendBehavior::kSkModeBehavior);
282 }
283 return GrFPSuccess(std::move(fp));
284}
285
Chris Dalton50c3c242021-06-14 16:32:35 -0600286void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias,
287 const SkPath& path) {
288 using grvx::float2;
289 fAffineMatrix[0] = m.getScaleX();
290 fAffineMatrix[1] = m.getSkewX();
291 fAffineMatrix[2] = m.getSkewY();
292 fAffineMatrix[3] = m.getScaleY();
293 float2 translate = {m.getTranslateX(), m.getTranslateY()};
294 float2 subpixelPosition = translate - skvx::floor(translate);
Robert Phillips62214f72021-06-15 10:12:51 -0400295 float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
296 GrPathTessellator::kLinearizationPrecision);
Chris Dalton50c3c242021-06-14 16:32:35 -0600297 skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
298 fAntialias = antialias;
299 fFillRule = (uint8_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
300 fPathGenID = path.getGenerationID();
301}
302
Chris Dalton7ae272f2021-06-10 11:45:14 -0600303bool GrTessellationPathRenderer::tryAddPathToAtlas(const GrCaps& caps, const SkMatrix& viewMatrix,
304 const SkPath& path, const SkRect& pathDevBounds,
Chris Dalton50c3c242021-06-14 16:32:35 -0600305 bool antialias, SkIRect* devIBounds,
Chris Dalton7ae272f2021-06-10 11:45:14 -0600306 SkIPoint16* locationInAtlas,
307 bool* transposedInAtlas) {
Chris Dalton50c3c242021-06-14 16:32:35 -0600308 SkASSERT(!viewMatrix.hasPerspective()); // See onCanDrawPath().
309
Chris Daltond72cb4c2020-07-16 17:50:17 -0600310 if (!fMaxAtlasPathWidth) {
311 return false;
312 }
313
Chris Dalton50c3c242021-06-14 16:32:35 -0600314 if (!caps.multisampleDisableSupport() && !antialias) {
Chris Dalton4e998532020-02-10 11:06:42 -0700315 return false;
316 }
317
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600318 // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
319 // guarantees that every atlas entry has a small height, which lends very well to efficient pow2
320 // atlas packing.
Chris Dalton7ae272f2021-06-10 11:45:14 -0600321 pathDevBounds.roundOut(devIBounds);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600322 int maxDimenstion = devIBounds->width();
323 int minDimension = devIBounds->height();
324 *transposedInAtlas = minDimension > maxDimenstion;
325 if (*transposedInAtlas) {
326 std::swap(minDimension, maxDimenstion);
327 }
328
Chris Dalton569c01b2021-05-25 10:11:46 -0600329 // Check if the path is too large for an atlas. Since we transpose paths in the atlas so height
330 // is always "minDimension", limiting to kMaxAtlasPathHeight^2 pixels guarantees height <=
331 // kMaxAtlasPathHeight, while also allowing paths that are very wide and short.
Chris Daltoneae5c162020-12-29 10:18:21 -0700332 if ((uint64_t)maxDimenstion * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
Chris Daltonb96995d2020-06-04 16:44:29 -0600333 maxDimenstion > fMaxAtlasPathWidth) {
Chris Dalton4e998532020-02-10 11:06:42 -0700334 return false;
335 }
336
Chris Dalton50c3c242021-06-14 16:32:35 -0600337 // Check if this path is already in the atlas. This is mainly for clip paths.
338 AtlasPathKey atlasPathKey;
339 if (!path.isVolatile()) {
340 atlasPathKey.set(viewMatrix, antialias, path);
341 if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
342 *locationInAtlas = *existingLocation;
343 return true;
344 }
345 }
346
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600347 if (!fAtlas.addRect(maxDimenstion, minDimension, locationInAtlas)) {
Chris Dalton4e998532020-02-10 11:06:42 -0700348 return false;
349 }
350
Chris Dalton50c3c242021-06-14 16:32:35 -0600351 // Remember this path's location in the atlas, in case it gets drawn again.
352 if (!path.isVolatile()) {
353 fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
354 }
355
Chris Dalton4e998532020-02-10 11:06:42 -0700356 SkMatrix atlasMatrix = viewMatrix;
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600357 if (*transposedInAtlas) {
358 std::swap(atlasMatrix[0], atlasMatrix[3]);
359 std::swap(atlasMatrix[1], atlasMatrix[4]);
360 float tx=atlasMatrix.getTranslateX(), ty=atlasMatrix.getTranslateY();
361 atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
362 atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
363 } else {
364 atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
365 locationInAtlas->y() - devIBounds->y());
366 }
Chris Dalton4e998532020-02-10 11:06:42 -0700367
368 // Concatenate this path onto our uber path that matches its fill and AA types.
Chris Dalton50c3c242021-06-14 16:32:35 -0600369 SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), antialias);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600370 uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y()); // Implicit moveTo(0,0).
Chris Dalton4e998532020-02-10 11:06:42 -0700371 uberPath->addPath(path, atlasMatrix);
Chris Daltonb832ce62020-01-06 19:49:37 -0700372 return true;
373}
374
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600375void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
Adlai Holler9902cff2020-11-11 08:51:25 -0500376 SkSpan<const uint32_t> /* taskIDs */) {
Chris Dalton4e998532020-02-10 11:06:42 -0700377 if (!fAtlas.drawBounds().isEmpty()) {
378 this->renderAtlas(onFlushRP);
379 fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
380 }
381 for (SkPath& path : fAtlasUberPaths) {
382 path.reset();
383 }
Chris Dalton50c3c242021-06-14 16:32:35 -0600384 fAtlasPathCache.reset();
Chris Dalton4e998532020-02-10 11:06:42 -0700385}
386
// Passes where the stencil value is nonzero, leaving the stencil contents untouched. Used when
// the render pass discards stencil values afterward anyway.
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

// Passes where the stencil value is nonzero AND resets it back to zero, for targets that keep
// their stencil contents after the render pass.
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());
404
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600405void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
Chris Dalton4e998532020-02-10 11:06:42 -0700406 auto rtc = fAtlas.instantiate(onFlushRP);
407 if (!rtc) {
408 return;
409 }
410
Chris Dalton569c01b2021-05-25 10:11:46 -0600411 SkRect atlasRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
412
Chris Dalton4e998532020-02-10 11:06:42 -0700413 // Add ops to stencil the atlas paths.
414 for (auto antialias : {false, true}) {
415 for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
416 SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
417 if (uberPath->isEmpty()) {
418 continue;
419 }
420 uberPath->setFillType(fillType);
421 GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
Chris Dalton031d76b2021-06-08 16:32:00 -0600422 auto op = GrOp::Make<GrPathStencilCoverOp>(onFlushRP->recordingContext(), SkMatrix::I(),
423 *uberPath, GrPaint(), aaType,
424 PathFlags::kStencilOnly, atlasRect);
Michael Ludwig7c12e282020-05-29 09:54:07 -0400425 rtc->addDrawOp(nullptr, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -0700426 }
427 }
428
Chris Daltonc3b67eb2020-02-10 21:09:58 -0700429 // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
Chris Dalton569c01b2021-05-25 10:11:46 -0600430 GrPaint paint;
431 paint.setColor4f(SK_PMColor4fWHITE);
Chris Daltonc3b67eb2020-02-10 21:09:58 -0700432 const GrUserStencilSettings* stencil;
Chris Dalton57ab06c2021-04-22 12:57:28 -0600433 if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
434 // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
435 // stencil values anyway, there is no need to reset the stencil values back to 0.
436 stencil = &kTestStencil;
437 } else {
Chris Daltonc3b67eb2020-02-10 21:09:58 -0700438 // Outset the cover rect in case there are T-junctions in the path bounds.
Chris Dalton569c01b2021-05-25 10:11:46 -0600439 atlasRect.outset(1, 1);
Chris Daltonc3b67eb2020-02-10 21:09:58 -0700440 stencil = &kTestAndResetStencil;
Chris Daltonc3b67eb2020-02-10 21:09:58 -0700441 }
Chris Dalton569c01b2021-05-25 10:11:46 -0600442 rtc->stencilRect(nullptr, stencil, std::move(paint), GrAA::kYes, SkMatrix::I(), atlasRect);
Chris Dalton4e998532020-02-10 11:06:42 -0700443
444 if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
445 onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
446 GrSurfaceProxy::ResolveFlags::kMSAA);
447 }
448}