blob: 8804a4aea2b206b665683a9e291f34cfed4c1fb2 [file] [log] [blame]
Chris Daltonb832ce62020-01-06 19:49:37 -07001/*
2 * Copyright 2019 Google LLC.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Chris Dalton0a22b1e2020-03-26 11:52:15 -06008#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
Chris Daltonb832ce62020-01-06 19:49:37 -07009
Chris Daltonb96995d2020-06-04 16:44:29 -060010#include "include/pathops/SkPathOps.h"
Chris Daltond2dc8dd2020-05-19 16:32:02 -060011#include "src/core/SkIPoint16.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070012#include "src/core/SkPathPriv.h"
13#include "src/gpu/GrClip.h"
14#include "src/gpu/GrMemoryPool.h"
15#include "src/gpu/GrRecordingContextPriv.h"
Brian Salomoneebe7352020-12-09 16:37:04 -050016#include "src/gpu/GrSurfaceDrawContext.h"
Michael Ludwig2686d692020-04-17 20:21:37 +000017#include "src/gpu/geometry/GrStyledShape.h"
Chris Daltonc3b67eb2020-02-10 21:09:58 -070018#include "src/gpu/ops/GrFillRectOp.h"
Chris Dalton4e998532020-02-10 11:06:42 -070019#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
Chris Daltonebb37e72021-01-27 17:59:45 -070020#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
Chris Dalton05007df2021-02-04 00:24:52 -070021#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
Chris Daltonb03f4a12021-01-27 17:45:52 -070022#include "src/gpu/tessellate/GrTessellatingStencilFillOp.h"
Chris Daltonb96995d2020-06-04 16:44:29 -060023#include "src/gpu/tessellate/GrWangsFormula.h"
Chris Daltonb832ce62020-01-06 19:49:37 -070024
// Initial backing-store size for the path atlas; the atlas grows as needed, up to the smaller of
// kMaxAtlasSize and the caps' preferred max render target size (see the constructor).
constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

// The atlas stores single-channel coverage, even when the final target is MSAA.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;
37
Chris Dalton1413d112020-07-09 11:26:31 -060038bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
Chris Dalton8f282f52021-01-06 11:47:58 -070039 return !caps.avoidStencilBuffers() &&
40 caps.drawInstancedSupport() &&
Chris Daltoneae5c162020-12-29 10:18:21 -070041 caps.shaderCaps()->vertexIDSupport() &&
42 !caps.disableTessellationPathRenderer();
Chris Dalton1413d112020-07-09 11:26:31 -060043}
44
// Constructs the renderer and its dynamic atlas. The atlas starts at kAtlasInitialSize and may
// grow up to min(kMaxAtlasSize, maxPreferredRenderTargetSize). initAtlasFlags() then decides
// whether the atlas can actually be used on this context (it is disabled for DDL recording and
// when alpha8 MSAA is unsupported).
GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    this->initAtlasFlags(rContext);
}
51
// Decides whether the atlas is usable on this context and, if hardware tessellation is supported,
// computes fMaxAtlasPathWidth such that no atlased path can ever exceed the hardware's
// maxTessellationSegments. Leaves fMaxAtlasPathWidth == 0 (atlas disabled) on DDL contexts or
// when alpha8 MSAA is unavailable.
void GrTessellationPathRenderer::initAtlasFlags(GrRecordingContext* rContext) {
    fMaxAtlasPathWidth = 0;

    if (!rContext->asDirectContext()) {
        // The atlas is not compatible with DDL. Leave it disabled on non-direct contexts.
        return;
    }

    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (caps.internalMultisampleCount(atlasFormat) <= 1) {
        // MSAA is not supported on kAlpha8. Leave the atlas disabled.
        return;
    }

    // Default to software-tessellated stencil fills; the flags may be relaxed below.
    fStencilAtlasFlags = OpFlags::kStencilOnly | OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;

    // The atlas usually does better with hardware tessellation. If hardware tessellation is
    // supported, we will next choose a max atlas path width that is guaranteed to never require
    // more tessellation segments than are supported by the hardware.
    if (!caps.shaderCaps()->tessellationSupport()) {
        return;
    }

    // Since we limit the area of paths in the atlas to kMaxAtlasPathHeight^2, taller paths can't
    // get very wide anyway. Find the tallest path whose width is limited by
    // GrWangsFormula::worst_case_cubic() rather than the max area constraint, and use that for our
    // max atlas path width.
    //
    // Solve the following equation for w:
    //
    //     GrWangsFormula::worst_case_cubic(kLinearizationPrecision, w, kMaxAtlasPathHeight^2 / w)
    //             == maxTessellationSegments
    //
    float k = GrWangsFormula::length_term<3>(kLinearizationPrecision);
    float h = kMaxAtlasPathHeight;
    float s = caps.shaderCaps()->maxTessellationSegments();
    // Quadratic formula from Numerical Recipes in C:
    //
    //     q = -1/2 [b + sign(b) sqrt(b*b - 4*a*c)]
    //     x1 = q/a
    //     x2 = c/q
    //
    // float a = 1;  // 'a' is always 1 in our specific equation.
    float b = -s*s*s*s / (4*k*k);  // Always negative.
    float c = h*h*h*h;  // Always positive.
    float discr = b*b - 4*1*c;
    if (discr <= 0) {
        // maxTessellationSegments is too small for any path whose area == kMaxAtlasPathHeight^2.
        // (This is unexpected because the GL spec mandates a minimum of 64 segments.)
        rContext->priv().printWarningMessage(SkStringPrintf(
                "WARNING: maxTessellationSegments seems too low. (%i)\n",
                caps.shaderCaps()->maxTessellationSegments()).c_str());
        return;
    }
    float q = -.5f * (b - std::sqrt(discr));  // Always positive.
    // The two roots represent the width^2 and height^2 of the tallest rectangle that is limited by
    // GrWangsFormula::worst_case_cubic().
    float r0 = q;  // Always positive.
    float r1 = c/q;  // Always positive.
    float worstCaseWidth = std::sqrt(std::max(r0, r1));
#ifdef SK_DEBUG
    float worstCaseHeight = std::sqrt(std::min(r0, r1));
    // Verify the above equation worked as expected. It should have found a width and height whose
    // area == kMaxAtlasPathHeight^2.
    SkASSERT(SkScalarNearlyEqual(worstCaseHeight * worstCaseWidth, h*h, 1));
    // Verify GrWangsFormula::worst_case_cubic() still works as we expect. The worst case number of
    // segments for this bounding box should be maxTessellationSegments.
    SkASSERT(SkScalarNearlyEqual(GrWangsFormula::worst_case_cubic(
            kLinearizationPrecision, worstCaseWidth, worstCaseHeight), s, 1));
#endif
    // Hardware tessellation is safe within the computed width limit; turn it back on.
    fStencilAtlasFlags &= ~OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = std::min(fMaxAtlasPathWidth, (int)worstCaseWidth);
}
127
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600128GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
Chris Daltonb832ce62020-01-06 19:49:37 -0700129 const CanDrawPathArgs& args) const {
Chris Dalton1c62a7b2020-06-29 22:01:14 -0600130 const GrStyledShape& shape = *args.fShape;
Chris Dalton57ab06c2021-04-22 12:57:28 -0600131 if (args.fAAType == GrAAType::kCoverage ||
132 shape.style().hasPathEffect() ||
Chris Dalton06b52ad2020-12-15 10:01:35 -0700133 args.fViewMatrix->hasPerspective() ||
134 shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
Chris Dalton2078cbe2020-12-14 19:04:55 -0700135 shape.inverseFilled() ||
Chris Daltonf285bd12021-04-26 14:29:54 -0600136 args.fHasUserStencilSettings ||
Chris Dalton537293bf2021-05-03 15:54:24 -0600137 !args.fProxy->canUseStencil(*args.fCaps)) {
Chris Daltonb832ce62020-01-06 19:49:37 -0700138 return CanDrawPath::kNo;
139 }
Chris Dalton90898de2021-03-12 14:15:36 -0700140 // On platforms that don't have native support for indirect draws and/or hardware tessellation,
141 // we find that cached triangulations of strokes can render slightly faster. Let cacheable paths
142 // go to the triangulator on these platforms for now.
143 // (crbug.com/1163441, skbug.com/11138, skbug.com/11139)
144 if (!args.fCaps->nativeDrawIndirectSupport() &&
145 !args.fCaps->shaderCaps()->tessellationSupport() &&
146 shape.hasUnstyledKey()) { // Is the path cacheable?
147 return CanDrawPath::kNo;
148 }
Chris Daltonb832ce62020-01-06 19:49:37 -0700149 return CanDrawPath::kYes;
150}
151
// Builds the appropriate tessellation op for a path draw: a stroke-tessellate op for styled
// shapes, an inner-triangulate op for fills where CPU fan triangulation looks profitable, or a
// stencil-fill op otherwise. Paths too large for the indirect-draw shaders are first cropped to
// the viewport in device space. Returns null when nothing needs to be drawn (PathOps failure on
// non-finite values, or a singular view matrix).
static GrOp::Owner make_op(GrRecordingContext* rContext, const GrSurfaceContext* surfaceContext,
                           GrTessellationPathRenderer::OpFlags opFlags, GrAAType aaType,
                           const SkRect& shapeDevBounds, const SkMatrix& viewMatrix,
                           const GrStyledShape& shape, GrPaint&& paint) {
    constexpr static auto kLinearizationPrecision =
            GrTessellationPathRenderer::kLinearizationPrecision;
    constexpr static auto kMaxResolveLevel = GrTessellationPathRenderer::kMaxResolveLevel;
    using OpFlags = GrTessellationPathRenderer::OpFlags;

    const GrShaderCaps& shaderCaps = *rContext->priv().caps()->shaderCaps();

    SkPath path;
    shape.asPath(&path);

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      shapeDevBounds.width(),
                                                                      shapeDevBounds.height());
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(surfaceContext->width(), surfaceContext->height());
        // Outset the viewport by the stroke's inflation so cropping can't clip visible geometry.
        float inflationRadius = 1;
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * viewMatrix.getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(viewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return null because
            // drawing nothing is acceptable behavior for FP overflow.
            return nullptr;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!viewMatrix.invert(&inverse)) {
            return nullptr;  // Singular view matrix. Nothing would have drawn anyway. Return null.
        }
        path.transform(inverse);
        // The cropped path is temporary; don't pollute Ganesh's path caches with it.
        path.setIsVolatile(true);

        SkRect newDevBounds;
        viewMatrix.mapRect(&newDevBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      newDevBounds.width(),
                                                                      newDevBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (!shape.style().isSimpleFill()) {
        // Strokes (and hairlines) get their own dedicated tessellation op.
        const SkStrokeRec& stroke = shape.style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        return GrOp::Make<GrStrokeTessellateOp>(rContext, aaType, viewMatrix, path, stroke,
                                                std::move(paint));
    } else {
        if ((1 << worstCaseResolveLevel) > shaderCaps.maxTessellationSegments()) {
            // The path is too large for hardware tessellation; a curve in this bounding box could
            // potentially require more segments than are supported by the hardware. Fall back on
            // indirect draws.
            opFlags |= OpFlags::kDisableHWTessellation;
        }
        int numVerbs = path.countVerbs();
        if (numVerbs > 0) {
            // Check if the path is large and/or simple enough that we can triangulate the inner fan
            // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
            // and then fill the inner fan directly to the final render target, thus drawing the
            // majority of pixels in a single render pass.
            SkScalar scales[2];
            SkAssertResult(viewMatrix.getMinMaxScales(scales));  // Will fail if perspective.
            const SkRect& bounds = path.getBounds();
            float gpuFragmentWork = bounds.height() * scales[0] * bounds.width() * scales[1];
            float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs);  // N log N.
            constexpr static float kCpuWeight = 512;
            constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
            if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
                return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
                                                            std::move(paint), aaType, opFlags);
            }
        }
        // Default: stencil the whole path with tessellated (or indirect) draws, then fill.
        return GrOp::Make<GrTessellatingStencilFillOp>(rContext, viewMatrix, path, std::move(paint),
                                                       aaType, opFlags);
    }
}
246
// Draws a path, preferring the coverage atlas for small simple fills and falling back on a
// direct tessellation op otherwise. Always returns true: once we've claimed the path in
// onCanDrawPath, drawing nothing (null op) is acceptable behavior.
bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, *args.fShape,
                                devBounds, args.fAAType, &devIBounds, &locationInAtlas,
                                &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
#ifdef SK_DEBUG
        // If using hardware tessellation in the atlas, make sure the max number of segments is
        // sufficient for this path. fMaxAtlasPathWidth should have been tuned for this to always be
        // the case.
        if (!(fStencilAtlasFlags & OpFlags::kDisableHWTessellation)) {
            int worstCaseNumSegments = GrWangsFormula::worst_case_cubic(kLinearizationPrecision,
                                                                        devIBounds.width(),
                                                                        devIBounds.height());
            const GrShaderCaps& shaderCaps = *args.fContext->priv().caps()->shaderCaps();
            SkASSERT(worstCaseNumSegments <= shaderCaps.maxTessellationSegments());
        }
#endif
        // The path was added to an uber path; emit an op that samples its atlas region.
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                surfaceDrawContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Not atlas-eligible: tessellate directly into the target. A null op means nothing to draw.
    if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kNone, args.fAAType,
                          devBounds, *args.fViewMatrix, *args.fShape, std::move(args.fPaint))) {
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    }
    return true;
}
292
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600293bool GrTessellationPathRenderer::tryAddPathToAtlas(
Chris Daltonb0643342020-12-15 01:04:12 -0700294 const GrCaps& caps, const SkMatrix& viewMatrix, const GrStyledShape& shape,
295 const SkRect& devBounds, GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
Chris Daltonb96995d2020-06-04 16:44:29 -0600296 bool* transposedInAtlas) {
Chris Daltonb0643342020-12-15 01:04:12 -0700297 if (!shape.style().isSimpleFill()) {
298 return false;
299 }
300
Chris Daltond72cb4c2020-07-16 17:50:17 -0600301 if (!fMaxAtlasPathWidth) {
302 return false;
303 }
304
Chris Dalton4e998532020-02-10 11:06:42 -0700305 if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
306 return false;
307 }
308
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600309 // Atlas paths require their points to be transformed on the CPU and copied into an "uber path".
310 // Check if this path has too many points to justify this extra work.
Chris Daltonb0643342020-12-15 01:04:12 -0700311 SkPath path;
312 shape.asPath(&path);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600313 if (path.countPoints() > 200) {
Chris Dalton4e998532020-02-10 11:06:42 -0700314 return false;
315 }
316
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600317 // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
318 // guarantees that every atlas entry has a small height, which lends very well to efficient pow2
319 // atlas packing.
Chris Daltonb96995d2020-06-04 16:44:29 -0600320 devBounds.roundOut(devIBounds);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600321 int maxDimenstion = devIBounds->width();
322 int minDimension = devIBounds->height();
323 *transposedInAtlas = minDimension > maxDimenstion;
324 if (*transposedInAtlas) {
325 std::swap(minDimension, maxDimenstion);
326 }
327
328 // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
329 // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
Chris Daltoneae5c162020-12-29 10:18:21 -0700330 if ((uint64_t)maxDimenstion * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
Chris Daltonb96995d2020-06-04 16:44:29 -0600331 maxDimenstion > fMaxAtlasPathWidth) {
Chris Dalton4e998532020-02-10 11:06:42 -0700332 return false;
333 }
334
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600335 if (!fAtlas.addRect(maxDimenstion, minDimension, locationInAtlas)) {
Chris Dalton4e998532020-02-10 11:06:42 -0700336 return false;
337 }
338
339 SkMatrix atlasMatrix = viewMatrix;
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600340 if (*transposedInAtlas) {
341 std::swap(atlasMatrix[0], atlasMatrix[3]);
342 std::swap(atlasMatrix[1], atlasMatrix[4]);
343 float tx=atlasMatrix.getTranslateX(), ty=atlasMatrix.getTranslateY();
344 atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
345 atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
346 } else {
347 atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
348 locationInAtlas->y() - devIBounds->y());
349 }
Chris Dalton4e998532020-02-10 11:06:42 -0700350
351 // Concatenate this path onto our uber path that matches its fill and AA types.
352 SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600353 uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y()); // Implicit moveTo(0,0).
Chris Dalton4e998532020-02-10 11:06:42 -0700354 uberPath->addPath(path, atlasMatrix);
Chris Daltonb832ce62020-01-06 19:49:37 -0700355 return true;
356}
357
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600358void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
Chris Daltonb0643342020-12-15 01:04:12 -0700359 GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;
Chris Daltonb832ce62020-01-06 19:49:37 -0700360 GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
Chris Daltonb0643342020-12-15 01:04:12 -0700361 SkRect devBounds;
362 args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
363 if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kStencilOnly, aaType,
364 devBounds, *args.fViewMatrix, *args.fShape, GrPaint())) {
365 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
366 }
Chris Daltonb832ce62020-01-06 19:49:37 -0700367}
Chris Dalton4e998532020-02-10 11:06:42 -0700368
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600369void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
Adlai Holler9902cff2020-11-11 08:51:25 -0500370 SkSpan<const uint32_t> /* taskIDs */) {
Chris Dalton4e998532020-02-10 11:06:42 -0700371 if (!fAtlas.drawBounds().isEmpty()) {
372 this->renderAtlas(onFlushRP);
373 fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
374 }
375 for (SkPath& path : fAtlasUberPaths) {
376 path.reset();
377 }
378}
379
// Passes where the stencil value is nonzero (i.e., inside a stencilled path) and leaves the
// stencil buffer untouched. Used when the render pass discards stencil afterwards anyway.
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

// Same test, but also zeroes the stencil value on pass so the buffer is clean for reuse.
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());
397
// Renders the accumulated uber paths into the atlas texture: first stencil every path (one op per
// fill-type/AA combination), then draw a stencil-tested cover rect that converts the stencil
// values into alpha coverage. Schedules an MSAA resolve if the atlas target needs one.
void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        // Atlas allocation failed; nothing we can render this flush.
        return;
    }

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrTessellatingStencilFillOp>(onFlushRP->recordingContext(),
                    SkMatrix::I(), *uberPath, GrPaint(), aaType, fStencilAtlasFlags);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto aaType = GrAAType::kMSAA;
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
        // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
        // stencil values anyway, there is no need to reset the stencil values back to 0.
        stencil = &kTestStencil;
    } else {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    // Solid white: the cover draw writes full coverage wherever the stencil test passes.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->recordingContext(), std::move(paint), aaType, &drawQuad,
                                      stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}
449}