/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/geometry/GrWangsFormula.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
#include "src/gpu/tessellate/GrPathStencilFillOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return !caps.avoidStencilBuffers() &&
           caps.drawInstancedSupport() &&
           caps.shaderCaps()->vertexIDSupport() &&
           !caps.disableTessellationPathRenderer();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
        fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;  // Enable the atlas.
    }
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (args.fAAType == GrAAType::kCoverage ||
        shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() ||
        shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
        shape.inverseFilled() ||
        args.fHasUserStencilSettings ||
        !args.fProxy->canUseStencil(*args.fCaps)) {
        return CanDrawPath::kNo;
    }
    if (shape.style().strokeRec().getStyle() != SkStrokeRec::kStroke_Style) {
        // On platforms that don't have native support for indirect draws and/or hardware
        // tessellation, we find that the default path renderer can sometimes draw fills faster.
        // Let fills fall through to the default renderer on these platforms for now.
        // (crbug.com/1163441, skbug.com/11138, skbug.com/11139)
        if (!args.fCaps->nativeDrawIndirectSupport() &&
            !args.fCaps->shaderCaps()->tessellationSupport() &&
            // Is the path cacheable? TODO: This check is outdated. Remove it next.
            shape.hasUnstyledKey()) {
            return CanDrawPath::kNo;
        }
    }
    return CanDrawPath::kYes;
}

static GrOp::Owner make_op(GrRecordingContext* rContext, const GrSurfaceContext* surfaceContext,
                           GrTessellationPathRenderer::OpFlags opFlags, GrAAType aaType,
                           const SkRect& shapeDevBounds, const SkMatrix& viewMatrix,
                           const GrStyledShape& shape, GrPaint&& paint) {
    constexpr static auto kLinearizationPrecision =
            GrTessellationPathRenderer::kLinearizationPrecision;
    constexpr static auto kMaxResolveLevel = GrTessellationPathRenderer::kMaxResolveLevel;
    SkPath path;
    shape.asPath(&path);

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
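    // (worst_case_cubic_log2 applies Wang's formula to the largest cubic that could fit in these
    // device-space bounds; roughly, it is enough subdivisions that the linearized curve stays
    // within about 1/kLinearizationPrecision of a pixel of the true curve.)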
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      shapeDevBounds.width(),
                                                                      shapeDevBounds.height());
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(surfaceContext->width(), surfaceContext->height());
        float inflationRadius = 1;
        const SkStrokeRec& stroke = shape.style().strokeRec();
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * viewMatrix.getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(viewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return null and draw
            // nothing, which is acceptable behavior for FP overflow.
            return nullptr;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!viewMatrix.invert(&inverse)) {
            return nullptr;  // Singular view matrix. Nothing would have drawn anyway. Return null.
        }
        path.transform(inverse);
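        // The cropped path is a transient, one-off copy; mark it volatile so nothing downstream
        // tries to cache data keyed off of it.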
        path.setIsVolatile(true);

        SkRect newDevBounds;
        viewMatrix.mapRect(&newDevBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      newDevBounds.width(),
                                                                      newDevBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (!shape.style().isSimpleFill()) {
        const SkStrokeRec& stroke = shape.style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        return GrOp::Make<GrStrokeTessellateOp>(rContext, aaType, viewMatrix, path, stroke,
                                                std::move(paint));
    } else {
        SkRect devBounds;
        viewMatrix.mapRect(&devBounds, path.getBounds());
        int numVerbs = path.countVerbs();
        if (numVerbs > 0) {
            // Check if the path is large and/or simple enough that we can triangulate the inner fan
            // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
            // and then fill the inner fan directly to the final render target, thus drawing the
            // majority of pixels in a single render pass.
            float gpuFragmentWork = devBounds.height() * devBounds.width();
            float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs);  // N log N.
            constexpr static float kCpuWeight = 512;
            constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
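            // Only pay the O(N log N) CPU triangulation cost when the GPU fragment work (in
            // pixels) is large enough to amortize it; kCpuWeight scales verb-cost into roughly
            // comparable units, and kMinNumPixelsToTriangulate keeps small paths on the all-GPU
            // stencil-fill path.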
            if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
                return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
                                                            std::move(paint), aaType, opFlags,
                                                            devBounds);
            }
        }
        return GrOp::Make<GrPathStencilFillOp>(rContext, viewMatrix, path, std::move(paint), aaType,
                                               opFlags, devBounds);
    }
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, *args.fShape,
                                devBounds, args.fAAType, &devIBounds, &locationInAtlas,
                                &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                surfaceDrawContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kNone, args.fAAType,
                          devBounds, *args.fViewMatrix, *args.fShape, std::move(args.fPaint))) {
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    }
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const GrStyledShape& shape,
        const SkRect& devBounds, GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
        bool* transposedInAtlas) {
    if (!shape.style().isSimpleFill()) {
        return false;
    }

    if (!fMaxAtlasPathWidth) {
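        // A zero fMaxAtlasPathWidth means the constructor determined the atlas can't be used
        // (see above).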
        return false;
    }

    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends very well to efficient pow2
    // atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we transpose paths in the atlas so height
    // is always "minDimension", limiting to kMaxAtlasPathHeight^2 pixels guarantees height <=
    // kMaxAtlasPathHeight, while also allowing paths that are very wide and short.
    if ((uint64_t)maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
    if (*transposedInAtlas) {
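        // Swap the linear components of the x and y rows so the mapped x and y outputs are
        // exchanged (i.e., the path is transposed in device space), then rebuild the translation
        // so the transposed bounds land at locationInAtlas.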
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath path;
    shape.asPath(&path);
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kStencilOnly, aaType,
                          devBounds, *args.fViewMatrix, *args.fShape, GrPaint())) {
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    }
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

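// kTestStencil passes fragments only where the stencil value is nonzero (i.e., where a path wrote
// coverage), and leaves the stencil buffer unmodified.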
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

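// kTestAndResetStencil uses the same nonzero test, but also resets passing stencil values back to
// zero as the cover draw proceeds.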
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    SkRect atlasRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathStencilFillOp>(onFlushRP->recordingContext(), SkMatrix::I(),
                                                      *uberPath, GrPaint(), aaType,
                                                      OpFlags::kStencilOnly |
                                                      OpFlags::kPreferWedges, atlasRect);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);
    const GrUserStencilSettings* stencil;
    if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
        // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
        // stencil values anyway, there is no need to reset the stencil values back to 0.
        stencil = &kTestStencil;
    } else {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        atlasRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    }
    rtc->stencilRect(nullptr, stencil, std::move(paint), GrAA::kYes, SkMatrix::I(), atlasRect);

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}