/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathTessellateOp.h"
#include "src/gpu/tessellate/GrStrokeIndirectOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
#include "src/gpu/tessellate/GrWangsFormula.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path
// is guaranteed to be quite small. So if we transpose tall paths, then every path will have a
// small height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

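// This renderer's shaders rely on instanced draws and sk_VertexID, so both are hard requirements.
// Hardware tessellation support is optional and is checked separately at the points where it is
// actually used (see initAtlasFlags() and make_stroke_op()).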
bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return caps.drawInstancedSupport() && caps.shaderCaps()->vertexIDSupport();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    this->initAtlasFlags(rContext);
}

void GrTessellationPathRenderer::initAtlasFlags(GrRecordingContext* rContext) {
    fMaxAtlasPathWidth = 0;

    if (!rContext->asDirectContext()) {
        // The atlas is not compatible with DDL. Leave it disabled on non-direct contexts.
        return;
    }

    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (caps.internalMultisampleCount(atlasFormat) <= 1) {
        // MSAA is not supported on kAlpha_8. Leave the atlas disabled.
        return;
    }

    fStencilAtlasFlags = OpFlags::kStencilOnly | OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;

    // The atlas usually does better with hardware tessellation. If hardware tessellation is
    // supported, we will next choose a max atlas path width that is guaranteed to never require
    // more tessellation segments than are supported by the hardware.
    if (!caps.shaderCaps()->tessellationSupport()) {
        return;
    }

    // Since we limit the area of paths in the atlas to kMaxAtlasPathHeight^2, taller paths can't
    // get very wide anyway. Find the tallest path whose width is limited by
    // GrWangsFormula::worst_case_cubic() rather than the max area constraint, and use that for
    // our max atlas path width.
    //
    // Solve the following equation for w:
    //
    //     GrWangsFormula::worst_case_cubic(kLinearizationIntolerance, w, kMaxAtlasPathHeight^2 / w)
    //         == maxTessellationSegments
    //
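    // Substituting height = kMaxAtlasPathHeight^2 / w, this works out to a quadratic in x = w^2,
    // whose two roots are the limiting width^2 and height^2:
    //
    //     x^2 + b*x + c == 0,  with a == 1 and b, c as computed below.
    //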
    float k = GrWangsFormula::length_term<3>(kLinearizationIntolerance);
    float h = kMaxAtlasPathHeight;
    float s = caps.shaderCaps()->maxTessellationSegments();
    // Quadratic formula from Numerical Recipes in C:
    //
    //     q = -1/2 [b + sign(b) sqrt(b*b - 4*a*c)]
    //     x1 = q/a
    //     x2 = c/q
    //
    // float a = 1;  // 'a' is always 1 in our specific equation.
    float b = -s*s*s*s / (4*k*k);  // Always negative.
    float c = h*h*h*h;  // Always positive.
    float discr = b*b - 4*1*c;
    if (discr <= 0) {
        // maxTessellationSegments is too small for any path whose area == kMaxAtlasPathHeight^2.
        // (This is unexpected because the GL spec mandates a minimum of 64 segments.)
        rContext->priv().printWarningMessage(SkStringPrintf(
                "WARNING: maxTessellationSegments seems too low. (%i)\n",
                caps.shaderCaps()->maxTessellationSegments()).c_str());
        return;
    }
    float q = -.5f * (b - std::sqrt(discr));  // Always positive.
    // The two roots represent the width^2 and height^2 of the tallest rectangle that is limited
    // by GrWangsFormula::worst_case_cubic().
    float r0 = q;  // Always positive.
    float r1 = c/q;  // Always positive.
    float worstCaseWidth = std::sqrt(std::max(r0, r1));
#ifdef SK_DEBUG
    float worstCaseHeight = std::sqrt(std::min(r0, r1));
    // Verify the above equation worked as expected. It should have found a width and height whose
    // area == kMaxAtlasPathHeight^2.
    SkASSERT(SkScalarNearlyEqual(worstCaseHeight * worstCaseWidth, h*h, 1));
    // Verify GrWangsFormula::worst_case_cubic() still works as we expect. The worst-case number
    // of segments for this bounding box should be maxTessellationSegments.
    SkASSERT(SkScalarNearlyEqual(GrWangsFormula::worst_case_cubic(
            kLinearizationIntolerance, worstCaseWidth, worstCaseHeight), s, 1));
#endif
    fStencilAtlasFlags &= ~OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = std::min(fMaxAtlasPathWidth, (int)worstCaseWidth);
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (shape.inverseFilled() || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective()) {
        return CanDrawPath::kNo;
    }

    if (GrAAType::kCoverage == args.fAAType) {
        SkASSERT(1 == args.fProxy->numSamples());
        if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
            return CanDrawPath::kNo;
        }
    }

    SkPath path;
    shape.asPath(&path);

    if (!shape.style().isSimpleFill()) {
        // These are only temporary restrictions while we bootstrap tessellated stroking. Every
        // one of them will eventually go away.
        if (shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
            SkPathPriv::ConicWeightCnt(path)) {
            return CanDrawPath::kNo;
        }
        if (shape.style().isSimpleHairline()) {
            // For the time being we transform hairline paths into device space. We can't do this
            // if it's possible the paint might use local coordinates.
            if (args.fPaint->usesVaryingCoords()) {
                return CanDrawPath::kNo;
            }
        }
    }

    return CanDrawPath::kYes;
}

static GrOp::Owner make_stroke_op(GrRecordingContext* context, GrAAType aaType,
                                  const SkMatrix& viewMatrix, const SkStrokeRec& stroke,
                                  const SkPath& path, GrPaint&& paint,
                                  const GrShaderCaps& shaderCaps) {
    // Only use hardware tessellation if the path has a somewhat large number of verbs. Otherwise
    // we seem to be better off using indirect draws. Our back door for HW tessellation shaders
    // isn't currently capable of passing varyings to the fragment shader either, so if the paint
    // uses varyings we need to use indirect draws.
    if (shaderCaps.tessellationSupport() && path.countVerbs() > 50 && !paint.usesVaryingCoords()) {
        return GrOp::Make<GrStrokeTessellateOp>(context, aaType, viewMatrix, stroke, path,
                                                std::move(paint));
    } else {
        return GrOp::Make<GrStrokeIndirectOp>(context, aaType, viewMatrix, path, stroke,
                                              std::move(paint));
    }
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* renderTargetContext = args.fRenderTargetContext;
    const GrShaderCaps& shaderCaps = *args.fContext->priv().caps()->shaderCaps();

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (args.fShape->style().isSimpleFill() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path, devBounds,
                                args.fAAType, &devIBounds, &locationInAtlas,
                                &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
#ifdef SK_DEBUG
        // If using hardware tessellation in the atlas, make sure the max number of segments is
        // sufficient for this path. fMaxAtlasPathWidth should have been tuned for this to always
        // be the case.
        if (!(fStencilAtlasFlags & OpFlags::kDisableHWTessellation)) {
            int worstCaseNumSegments = GrWangsFormula::worst_case_cubic(kLinearizationIntolerance,
                                                                        devIBounds.width(),
                                                                        devIBounds.height());
            SkASSERT(worstCaseNumSegments <= shaderCaps.maxTessellationSegments());
        }
#endif
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Find the worst-case log2 number of line segments that a curve in this path might need to
    // be divided into.
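    // (A resolve level of n means each curve would be subdivided into 2^n line segments.)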
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(renderTargetContext->width(),
                                        renderTargetContext->height());
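        // Outset the viewport before cropping so that path geometry lying just outside the render
        // target, but whose stroke still reaches visible pixels, survives the intersection.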
        float inflationRadius = 1;
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * args.fViewMatrix->getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(*args.fViewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return true because
            // drawing nothing is acceptable behavior for FP overflow.
            return true;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!args.fViewMatrix->invert(&inverse)) {
            return true;  // Singular view matrix. Nothing would have drawn anyway.
        }
        path.transform(inverse);
        path.setIsVolatile(true);
        args.fViewMatrix->mapRect(&devBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (args.fShape->style().isSimpleHairline()) {
        // Since we will be transforming the path, just double check that we are still in a
        // position where the paint will not use local coordinates.
        SkASSERT(!args.fPaint.usesVaryingCoords());
        // Pre-transform the path into device space and use a stroke width of 1.
        SkPath devPath;
        path.transform(*args.fViewMatrix, &devPath);
        SkStrokeRec devStroke = args.fShape->style().strokeRec();
        devStroke.setStrokeStyle(1);
        auto op = make_stroke_op(args.fContext, args.fAAType, SkMatrix::I(), devStroke, devPath,
                                 std::move(args.fPaint), shaderCaps);
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    if (!args.fShape->style().isSimpleFill()) {
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() == SkStrokeRec::kStroke_Style);
        auto op = make_stroke_op(args.fContext, args.fAAType, *args.fViewMatrix, stroke, path,
                                 std::move(args.fPaint), shaderCaps);
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    auto drawPathFlags = OpFlags::kNone;
    if ((1 << worstCaseResolveLevel) > shaderCaps.maxTessellationSegments()) {
        // The path is too large for hardware tessellation; a curve in this bounding box could
        // potentially require more segments than are supported by the hardware. Fall back on
        // indirect draws.
        drawPathFlags |= OpFlags::kDisableHWTessellation;
    }

    auto op = GrOp::Make<GrPathTessellateOp>(
            args.fContext, *args.fViewMatrix, path, std::move(args.fPaint),
            args.fAAType, drawPathFlags);
    renderTargetContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path,
        const SkRect& devBounds, GrAAType aaType, SkIRect* devIBounds,
        SkIPoint16* locationInAtlas, bool* transposedInAtlas) {
    if (!fMaxAtlasPathWidth) {
        return false;
    }

    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Atlas paths require their points to be transformed on the CPU and copied into an "uber
    // path". Check if this path has too many points to justify this extra work.
    if (path.countPoints() > 200) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends itself very well to
    // efficient pow2 atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
    // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
    if (maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
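    // Build a matrix that maps the path directly into its atlas location. If the path is being
    // transposed, swap the matrix's x and y outputs, then recompute the translation so the
    // transposed device-space bounds land at locationInAtlas.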
    if (*transposedInAtlas) {
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    auto op = GrOp::Make<GrPathTessellateOp>(
            args.fContext, *args.fViewMatrix, path, GrPaint(), aaType, OpFlags::kStencilOnly);
    args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op));
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
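    // Render the atlas if any paths were added to it during this flush, then reset it and clear
    // out the uber paths for the next flush.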
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

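// Passes wherever the stencil value is nonzero (i.e., pixels covered by a stencilled path) and
// leaves the stencil buffer untouched.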
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

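// Same coverage test as above, but also resets passing stencil values back to zero so the atlas's
// stencil buffer is left clean when Ganesh won't discard it after the render pass.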
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathTessellateOp>(onFlushRP->recordingContext(),
                    SkMatrix::I(), *uberPath, GrPaint(), aaType, fStencilAtlasFlags);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto aaType = GrAAType::kMSAA;
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    // This will be the final op in the renderTargetContext. So if Ganesh is planning to discard
    // the stencil values anyway, then we might not actually need to reset the stencil values back
    // to 0.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass();

    if (rtc->numSamples() == 1) {
        // We are mixed sampled. We need to either enable conservative raster (preferred) or
        // disable MSAA in order to avoid double blend artifacts. (Even if we disable MSAA for the
        // cover geometry, the stencil test is still multisampled and will still produce smooth
        // results.)
        if (onFlushRP->caps()->conservativeRasterSupport()) {
            fillRectFlags |= GrFillRectOp::InputFlags::kConservativeRaster;
        } else {
            aaType = GrAAType::kNone;
        }
        mustResetStencil = true;
    }

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (mustResetStencil) {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    } else {
        stencil = &kTestStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->recordingContext(), std::move(paint), aaType, &drawQuad,
                                      stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}
469}