/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrTessellatePathOp.h"
#include "src/gpu/tessellate/GrTessellateStrokeOp.h"
#include "src/gpu/tessellate/GrWangsFormula.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return caps.drawInstancedSupport() && caps.shaderCaps()->vertexIDSupport();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(const GrCaps& caps)
        : fAtlas(GrColorType::kAlpha_8, GrDynamicAtlas::InternalMultisample::kYes,
                 kAtlasInitialSize, std::min(kMaxAtlasSize, caps.maxPreferredRenderTargetSize()),
                 caps, kAtlasAlgorithm) {
    this->initAtlasFlags(*caps.shaderCaps());
}

void GrTessellationPathRenderer::initAtlasFlags(const GrShaderCaps& shaderCaps) {
    fStencilAtlasFlags = OpFlags::kStencilOnly | OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;
    // The atlas usually does better with hardware tessellation. If hardware tessellation is
    // supported, we choose a max atlas path width that is guaranteed to never require more
    // tessellation segments than are supported by the hardware.
    if (!shaderCaps.tessellationSupport()) {
        return;
    }
    // Since we limit the area of paths in the atlas to kMaxAtlasPathHeight^2, taller paths can't
    // get very wide anyway. Find the tallest path whose width is limited by
    // GrWangsFormula::worst_case_cubic() rather than the max area constraint, and use that for our
    // max atlas path width.
    //
    // Solve the following equation for w:
    //
    //     GrWangsFormula::worst_case_cubic(kLinearizationIntolerance, w, kMaxAtlasPathHeight^2 / w)
    //             == maxTessellationSegments
    //
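    // A sketch of the reduction (assuming worst_case_cubic(intolerance, w, h) evaluates to
    // sqrt(2*k * sqrt(w*w + h*h)), with k = cubic_k(intolerance)): substitute
    // h = kMaxAtlasPathHeight^2 / w, raise both sides to the 4th power, multiply through by
    // w^2 / (4*k^2), and let x = w^2. The equation becomes the quadratic solved below:
    //
    //     x^2 - (maxTessellationSegments^4 / (4*k^2)) * x + kMaxAtlasPathHeight^4 == 0
    //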
    float k = GrWangsFormula::cubic_k(kLinearizationIntolerance);
    float h = kMaxAtlasPathHeight;
    float s = shaderCaps.maxTessellationSegments();
    // Quadratic formula from Numerical Recipes in C:
    //
    //     q = -1/2 [b + sign(b) sqrt(b*b - 4*a*c)]
    //     x1 = q/a
    //     x2 = c/q
    //
    // float a = 1;  // 'a' is always 1 in our specific equation.
    float b = -s*s*s*s / (4*k*k);  // Always negative.
    float c = h*h*h*h;  // Always positive.
    float det = b*b - 4*1*c;
    if (det <= 0) {
        // maxTessellationSegments is too small for any path whose area == kMaxAtlasPathHeight^2.
        // (This is unexpected because the GL spec mandates a minimum of 64 segments.)
        SkDebugf("WARNING: maxTessellationSegments seems too low. (%i)\n",
                 shaderCaps.maxTessellationSegments());
        return;
    }
    float q = -.5f * (b - std::sqrt(det));  // Always positive.
    // The two roots represent the width^2 and height^2 of the tallest rectangle that is limited by
    // GrWangsFormula::worst_case_cubic().
    float r0 = q;  // Always positive.
    float r1 = c/q;  // Always positive.
    float worstCaseWidth = std::sqrt(std::max(r0, r1));
#ifdef SK_DEBUG
    float worstCaseHeight = std::sqrt(std::min(r0, r1));
    // Verify the above equation worked as expected. It should have found a width and height whose
    // area == kMaxAtlasPathHeight^2.
    SkASSERT(SkScalarNearlyEqual(worstCaseHeight * worstCaseWidth, h*h, 1));
    // Verify GrWangsFormula::worst_case_cubic() still works as we expect. The worst case number of
    // segments for this bounding box should be maxTessellationSegments.
    SkASSERT(SkScalarNearlyEqual(GrWangsFormula::worst_case_cubic(
            kLinearizationIntolerance, worstCaseWidth, worstCaseHeight), s, 1));
#endif
    fStencilAtlasFlags &= ~OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = std::min(fMaxAtlasPathWidth, (int)worstCaseWidth);
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (shape.inverseFilled() || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective()) {
        return CanDrawPath::kNo;
    }

    if (GrAAType::kCoverage == args.fAAType) {
        SkASSERT(1 == args.fProxy->numSamples());
        if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
            return CanDrawPath::kNo;
        }
    }

    SkPath path;
    shape.asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    if (!shape.style().isSimpleFill()) {
        SkPMColor4f constantColor;
        // These are only temporary restrictions while we bootstrap tessellated stroking. Every one
        // of them will eventually go away.
        if (shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
            !args.fCaps->shaderCaps()->tessellationSupport() ||
            GrAAType::kCoverage == args.fAAType || !args.fViewMatrix->isSimilarity() ||
            !args.fPaint->isConstantBlendedColor(&constantColor) ||
            args.fPaint->numCoverageFragmentProcessors()) {
            return CanDrawPath::kNo;
        }
    }

    return CanDrawPath::kYes;
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrRenderTargetContext* renderTargetContext = args.fRenderTargetContext;
    GrOpMemoryPool* pool = args.fContext->priv().opMemoryPool();
    const GrShaderCaps& shaderCaps = *args.fContext->priv().caps()->shaderCaps();

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (args.fShape->style().isSimpleFill() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path, devBounds,
                                args.fAAType, &devIBounds, &locationInAtlas, &transposedInAtlas)) {
#ifdef SK_DEBUG
        // If using hardware tessellation in the atlas, make sure the max number of segments is
        // sufficient for this path. fMaxAtlasPathWidth should have been tuned for this to always be
        // the case.
        if (!(fStencilAtlasFlags & OpFlags::kDisableHWTessellation)) {
            int worstCaseNumSegments = GrWangsFormula::worst_case_cubic(kLinearizationIntolerance,
                                                                        devIBounds.width(),
                                                                        devIBounds.height());
            SkASSERT(worstCaseNumSegments <= shaderCaps.maxTessellationSegments());
        }
#endif
        auto op = pool->allocate<GrDrawAtlasPathOp>(
                renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(renderTargetContext->width(),
                                        renderTargetContext->height());
        float inflationRadius = 1;
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
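        // The inflation below is a conservative device-space bound: hairlines ignore the view
        // matrix and are one device-space pixel wide, so their inflation is computed directly in
        // device space with a stroke width of 1, while other strokes inflate in local space and
        // therefore have their inflation radius scaled by the view matrix's maximum scale factor.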
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * args.fViewMatrix->getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(*args.fViewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return true
            // because drawing nothing is acceptable behavior for FP overflow.
            return true;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!args.fViewMatrix->invert(&inverse)) {
            return true;  // Singular view matrix. Nothing would have drawn anyway. Return true.
        }
        path.transform(inverse);
        path.setIsVolatile(true);
        args.fViewMatrix->mapRect(&devBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (!args.fShape->style().isSimpleFill()) {
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        auto op = pool->allocate<GrTessellateStrokeOp>(*args.fViewMatrix, path, stroke,
                                                       std::move(args.fPaint), args.fAAType);
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    auto drawPathFlags = OpFlags::kNone;
    if ((1 << worstCaseResolveLevel) > shaderCaps.maxTessellationSegments()) {
        // The path is too large for hardware tessellation; a curve in this bounding box could
        // potentially require more segments than are supported by the hardware. Fall back on
        // indirect draws.
        drawPathFlags |= OpFlags::kDisableHWTessellation;
    }

    auto op = pool->allocate<GrTessellatePathOp>(*args.fViewMatrix, path, std::move(args.fPaint),
                                                 args.fAAType, drawPathFlags);
    renderTargetContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path, const SkRect& devBounds,
        GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
        bool* transposedInAtlas) {
    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Atlas paths require their points to be transformed on the CPU and copied into an "uber path".
    // Check if this path has too many points to justify this extra work.
    if (path.countPoints() > 200) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends itself very well to
    // efficient pow2 atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
    // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
    if (maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
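    // Build the matrix that maps the path into its atlas slot. Transposing device space amounts to
    // swapping the two rows of the view matrix's upper 2x2 (scaleX<->skewY, skewX<->scaleY) and
    // swapping its translate components; in either case we then offset the path from its device
    // bounds origin to its assigned location in the atlas.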
    if (*transposedInAtlas) {
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    auto op = args.fContext->priv().opMemoryPool()->allocate<GrTessellatePathOp>(
            *args.fViewMatrix, path, GrPaint(), aaType, OpFlags::kStencilOnly);
    args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op));
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          const uint32_t* opsTaskIDs, int numOpsTaskIDs) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

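// Lets the cover draw through only where the stencil buffer is nonzero (i.e., where a path
// stencilled in coverage), and leaves the stencil values untouched.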
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

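// Same test as above, but passing fragments also zero out their stencil values, for when the
// stencil buffer must be left cleared after the cover draw.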
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = onFlushRP->opMemoryPool()->allocate<GrTessellatePathOp>(
                    SkMatrix::I(), *uberPath, GrPaint(), aaType, fStencilAtlasFlags);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    // This will be the final op in the renderTargetContext. So if Ganesh is planning to discard the
    // stencil values anyway, then we might not actually need to reset the stencil values back to 0.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass();

    if (rtc->numSamples() <= 1) {
        // We are mixed sampled. We need to enable conservative raster and ensure stencil values get
        // reset in order to avoid artifacts along the diagonal of the atlas.
        fillRectFlags |= GrFillRectOp::InputFlags::kConservativeRaster;
        mustResetStencil = true;
    }

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (mustResetStencil) {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    } else {
        stencil = &kTestStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->surfPriv().getContext(), std::move(paint),
                                      GrAAType::kMSAA, &drawQuad, stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}