/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrSurfaceContextPriv.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrTessellatePathOp.h"
#include "src/gpu/tessellate/GrTessellateStrokeOp.h"
#include "src/gpu/tessellate/GrWangsFormula.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return caps.drawInstancedSupport() && caps.shaderCaps()->vertexIDSupport();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(const GrCaps& caps)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, caps.maxPreferredRenderTargetSize()),
                 caps, kAtlasAlgorithm) {
    this->initAtlasFlags(caps);
}

void GrTessellationPathRenderer::initAtlasFlags(const GrCaps& caps) {
    fStencilAtlasFlags = OpFlags::kStencilOnly | OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;

    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (caps.internalMultisampleCount(atlasFormat) <= 1) {
        // MSAA is not supported on kAlpha8. Disable the atlas.
        fMaxAtlasPathWidth = 0;
        return;
    }

    // The atlas usually does better with hardware tessellation. If hardware tessellation is
    // supported, we will next choose a max atlas path width that is guaranteed to never require
    // more tessellation segments than are supported by the hardware.
    if (!caps.shaderCaps()->tessellationSupport()) {
        return;
    }

    // Since we limit the area of paths in the atlas to kMaxAtlasPathHeight^2, taller paths can't
    // get very wide anyway. Find the tallest path whose width is limited by
    // GrWangsFormula::worst_case_cubic() rather than the max area constraint, and use that for our
    // max atlas path width.
    //
    // Solve the following equation for w:
    //
    //     GrWangsFormula::worst_case_cubic(kLinearizationIntolerance, w, kMaxAtlasPathHeight^2 / w)
    //             == maxTessellationSegments
    //
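    // A sketch of how this reduces to the quadratic solved below (assuming
    // GrWangsFormula::worst_case_cubic(n, w, h) evaluates to sqrt(2 * cubic_k(n) * hypot(w, h))):
    // squaring both sides twice gives s^4 == 4*k^2 * (w^2 + h^4/w^2). Substituting x = w^2 and
    // multiplying through by x rearranges this into x^2 + b*x + c == 0, with a == 1,
    // b == -s^4/(4*k^2), and c == h^4.
    //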
77 float k = GrWangsFormula::cubic_k(kLinearizationIntolerance);
78 float h = kMaxAtlasPathHeight;
Chris Daltond72cb4c2020-07-16 17:50:17 -060079 float s = caps.shaderCaps()->maxTessellationSegments();
Chris Daltonb96995d2020-06-04 16:44:29 -060080 // Quadratic formula from Numerical Recipes in C:
81 //
82 // q = -1/2 [b + sign(b) sqrt(b*b - 4*a*c)]
83 // x1 = q/a
84 // x2 = c/q
85 //
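    // (The 'q' form is preferred over the textbook roots because it avoids catastrophic
    // cancellation when b and sqrt(b*b - 4*a*c) are nearly equal in magnitude.)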
    // float a = 1;  // 'a' is always 1 in our specific equation.
    float b = -s*s*s*s / (4*k*k);  // Always negative.
    float c = h*h*h*h;  // Always positive.
    float det = b*b - 4*1*c;
    if (det <= 0) {
        // maxTessellationSegments is too small for any path whose area == kMaxAtlasPathHeight^2.
        // (This is unexpected because the GL spec mandates a minimum of 64 segments.)
        SkDebugf("WARNING: maxTessellationSegments seems too low. (%i)\n",
                 caps.shaderCaps()->maxTessellationSegments());
        return;
    }
    float q = -.5f * (b - std::sqrt(det));  // Always positive.
    // The two roots represent the width^2 and height^2 of the tallest rectangle that is limited by
    // GrWangsFormula::worst_case_cubic().
    float r0 = q;  // Always positive.
    float r1 = c/q;  // Always positive.
    float worstCaseWidth = std::sqrt(std::max(r0, r1));
#ifdef SK_DEBUG
    float worstCaseHeight = std::sqrt(std::min(r0, r1));
    // Verify the above equation worked as expected. It should have found a width and height whose
    // area == kMaxAtlasPathHeight^2.
    SkASSERT(SkScalarNearlyEqual(worstCaseHeight * worstCaseWidth, h*h, 1));
    // Verify GrWangsFormula::worst_case_cubic() still works as we expect. The worst case number of
    // segments for this bounding box should be maxTessellationSegments.
    SkASSERT(SkScalarNearlyEqual(GrWangsFormula::worst_case_cubic(
            kLinearizationIntolerance, worstCaseWidth, worstCaseHeight), s, 1));
#endif
    fStencilAtlasFlags &= ~OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = std::min(fMaxAtlasPathWidth, (int)worstCaseWidth);
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (shape.inverseFilled() || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective()) {
        return CanDrawPath::kNo;
    }

    if (GrAAType::kCoverage == args.fAAType) {
        SkASSERT(1 == args.fProxy->numSamples());
        if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
            return CanDrawPath::kNo;
        }
    }

    SkPath path;
    shape.asPath(&path);
    if (SkPathPriv::ConicWeightCnt(path)) {
        return CanDrawPath::kNo;
    }

    if (!shape.style().isSimpleFill()) {
        SkPMColor4f constantColor;
        // These are only temporary restrictions while we bootstrap tessellated stroking. Every one
        // of them will eventually go away.
        if (shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
            !args.fCaps->shaderCaps()->tessellationSupport() ||
            GrAAType::kCoverage == args.fAAType ||
            !args.fPaint->isConstantBlendedColor(&constantColor) ||
            args.fPaint->hasCoverageFragmentProcessor()) {
            return CanDrawPath::kNo;
        }
    }

    return CanDrawPath::kYes;
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrRenderTargetContext* renderTargetContext = args.fRenderTargetContext;
    GrOpMemoryPool* pool = args.fContext->priv().opMemoryPool();
    const GrShaderCaps& shaderCaps = *args.fContext->priv().caps()->shaderCaps();

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (args.fShape->style().isSimpleFill() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path, devBounds,
                                args.fAAType, &devIBounds, &locationInAtlas, &transposedInAtlas)) {
#ifdef SK_DEBUG
        // If using hardware tessellation in the atlas, make sure the max number of segments is
        // sufficient for this path. fMaxAtlasPathWidth should have been tuned for this to always
        // be the case.
        if (!(fStencilAtlasFlags & OpFlags::kDisableHWTessellation)) {
            int worstCaseNumSegments = GrWangsFormula::worst_case_cubic(kLinearizationIntolerance,
                                                                        devIBounds.width(),
                                                                        devIBounds.height());
            SkASSERT(worstCaseNumSegments <= shaderCaps.maxTessellationSegments());
        }
#endif
        auto op = pool->allocate<GrDrawAtlasPathOp>(
                renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
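    // (2^worstCaseResolveLevel is the worst-case segment count itself; it is what gets compared
    // against maxTessellationSegments below when deciding whether hardware tessellation can be
    // used for this path.)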
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(renderTargetContext->width(),
                                        renderTargetContext->height());
        float inflationRadius = 1;
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * args.fViewMatrix->getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(*args.fViewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return true
            // because drawing nothing is acceptable behavior for FP overflow.
            return true;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!args.fViewMatrix->invert(&inverse)) {
            return true;  // Singular view matrix. Nothing would have drawn anyway.
        }
        path.transform(inverse);
        path.setIsVolatile(true);
        args.fViewMatrix->mapRect(&devBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (args.fShape->style().isSimpleHairline()) {
        // Pre-transform the path into device space and use a stroke width of 1.
#ifdef SK_DEBUG
        // Since we will be transforming the path, just double check that we are still in a
        // position where the paint will not use local coordinates.
        SkPMColor4f constantColor;
        SkASSERT(args.fPaint.isConstantBlendedColor(&constantColor));
#endif
        SkPath devPath;
        path.transform(*args.fViewMatrix, &devPath);
        SkStrokeRec devStroke = args.fShape->style().strokeRec();
        devStroke.setStrokeStyle(1);
        auto op = pool->allocate<GrTessellateStrokeOp>(args.fAAType, SkMatrix::I(), devPath,
                                                       devStroke, std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    if (!args.fShape->style().isSimpleFill()) {
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() == SkStrokeRec::kStroke_Style);
        auto op = pool->allocate<GrTessellateStrokeOp>(args.fAAType, *args.fViewMatrix, path,
                                                       stroke, std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    auto drawPathFlags = OpFlags::kNone;
    if ((1 << worstCaseResolveLevel) > shaderCaps.maxTessellationSegments()) {
        // The path is too large for hardware tessellation; a curve in this bounding box could
        // potentially require more segments than are supported by the hardware. Fall back on
        // indirect draws.
        drawPathFlags |= OpFlags::kDisableHWTessellation;
    }

    auto op = pool->allocate<GrTessellatePathOp>(*args.fViewMatrix, path, std::move(args.fPaint),
                                                 args.fAAType, drawPathFlags);
    renderTargetContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path, const SkRect& devBounds,
        GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
        bool* transposedInAtlas) {
    if (!fMaxAtlasPathWidth) {
        return false;
    }

    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Atlas paths require their points to be transformed on the CPU and copied into an "uber
    // path". Check if this path has too many points to justify this extra work.
    if (path.countPoints() > 200) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends very well to efficient
    // pow2 atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
    // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
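    // (For example, with kMaxAtlasPathHeight == 128 the area limit is 16384 px^2: a 256x64
    // bounding box passes the area check and is stored as a 64px-tall entry, while a 200x200 box,
    // at 40000 px^2, is rejected.)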
    if (maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
    if (*transposedInAtlas) {
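        // Swapping the top two rows of the matrix (scaleX<->skewY, skewX<->scaleY) exchanges the
        // mapped x and y outputs, which transposes the path; the translate terms are then rebuilt
        // so the transposed path lands at locationInAtlas.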
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    auto op = args.fContext->priv().opMemoryPool()->allocate<GrTessellatePathOp>(
            *args.fViewMatrix, path, GrPaint(), aaType, OpFlags::kStencilOnly);
    args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op));
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          const uint32_t* opsTaskIDs, int numOpsTaskIDs) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = onFlushRP->opMemoryPool()->allocate<GrTessellatePathOp>(
                    SkMatrix::I(), *uberPath, GrPaint(), aaType, fStencilAtlasFlags);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto aaType = GrAAType::kMSAA;
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    // This will be the final op in the renderTargetContext. So if Ganesh is planning to discard
    // the stencil values anyway, then we might not actually need to reset the stencil values back
    // to 0.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass();

    if (rtc->numSamples() == 1) {
        // We are mixed sampled. We need to either enable conservative raster (preferred) or
        // disable MSAA in order to avoid double blend artifacts. (Even if we disable MSAA for the
        // cover geometry, the stencil test is still multisampled and will still produce smooth
        // results.)
        if (onFlushRP->caps()->conservativeRasterSupport()) {
            fillRectFlags |= GrFillRectOp::InputFlags::kConservativeRaster;
        } else {
            aaType = GrAAType::kNone;
        }
        mustResetStencil = true;
    }

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (mustResetStencil) {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    } else {
        stencil = &kTestStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->surfPriv().getContext(), std::move(paint), aaType,
                                      &drawQuad, stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}