/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathTessellateOp.h"
#include "src/gpu/tessellate/GrStrokeIndirectOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
#include "src/gpu/tessellate/GrWangsFormula.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return caps.drawInstancedSupport() && caps.shaderCaps()->vertexIDSupport();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    this->initAtlasFlags(rContext);
}

void GrTessellationPathRenderer::initAtlasFlags(GrRecordingContext* rContext) {
    fMaxAtlasPathWidth = 0;

    if (!rContext->asDirectContext()) {
        // The atlas is not compatible with DDL. Leave it disabled on non-direct contexts.
        return;
    }

    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (caps.internalMultisampleCount(atlasFormat) <= 1) {
        // MSAA is not supported on kAlpha8. Leave the atlas disabled.
        return;
    }

    fStencilAtlasFlags = OpFlags::kStencilOnly | OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;

    // The atlas usually does better with hardware tessellation. If hardware tessellation is
    // supported, we will next choose a max atlas path width that is guaranteed to never require
    // more tessellation segments than are supported by the hardware.
    if (!caps.shaderCaps()->tessellationSupport()) {
        return;
    }

    // Since we limit the area of paths in the atlas to kMaxAtlasPathHeight^2, taller paths can't
    // get very wide anyway. Find the tallest path whose width is limited by
    // GrWangsFormula::worst_case_cubic() rather than the max area constraint, and use that for our
    // max atlas path width.
    //
    // Solve the following equation for w:
    //
    //     GrWangsFormula::worst_case_cubic(kLinearizationIntolerance, w, kMaxAtlasPathHeight^2 / w)
    //             == maxTessellationSegments
    //
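    // Sketch of the derivation (assuming worst_case_cubic(intolerance, w, h) takes the form
    // sqrt(2*k*hypot(w, h)) with k = length_term<3>(intolerance), an assumption consistent with
    // the coefficients chosen below): squaring sqrt(2*k*hypot(w, h^2/w)) == s twice gives
    // w^2 + h^4/w^2 == s^4/(4*k^2). Multiplying through by w^2 yields a quadratic in x = w^2:
    //
    //     x^2 - s^4/(4*k^2) * x + h^4 == 0    (i.e., a=1, b=-s^4/(4*k^2), c=h^4)
    //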
    float k = GrWangsFormula::length_term<3>(kLinearizationIntolerance);
    float h = kMaxAtlasPathHeight;
    float s = caps.shaderCaps()->maxTessellationSegments();
    // Quadratic formula from Numerical Recipes in C:
    //
    //     q = -1/2 [b + sign(b) sqrt(b*b - 4*a*c)]
    //     x1 = q/a
    //     x2 = c/q
    //
    // float a = 1;  // 'a' is always 1 in our specific equation.
    float b = -s*s*s*s / (4*k*k);  // Always negative.
    float c = h*h*h*h;  // Always positive.
    float discr = b*b - 4*1*c;
    if (discr <= 0) {
        // maxTessellationSegments is too small for any path whose area == kMaxAtlasPathHeight^2.
        // (This is unexpected because the GL spec mandates a minimum of 64 segments.)
        rContext->priv().printWarningMessage(SkStringPrintf(
                "WARNING: maxTessellationSegments seems too low. (%i)\n",
                caps.shaderCaps()->maxTessellationSegments()).c_str());
        return;
    }
    float q = -.5f * (b - std::sqrt(discr));  // Always positive.
    // The two roots represent the width^2 and height^2 of the tallest rectangle that is limited by
    // GrWangsFormula::worst_case_cubic().
    float r0 = q;  // Always positive.
    float r1 = c/q;  // Always positive.
    float worstCaseWidth = std::sqrt(std::max(r0, r1));
#ifdef SK_DEBUG
    float worstCaseHeight = std::sqrt(std::min(r0, r1));
    // Verify the above equation worked as expected. It should have found a width and height whose
    // area == kMaxAtlasPathHeight^2.
    SkASSERT(SkScalarNearlyEqual(worstCaseHeight * worstCaseWidth, h*h, 1));
    // Verify GrWangsFormula::worst_case_cubic() still works as we expect. The worst case number of
    // segments for this bounding box should be maxTessellationSegments.
    SkASSERT(SkScalarNearlyEqual(GrWangsFormula::worst_case_cubic(
            kLinearizationIntolerance, worstCaseWidth, worstCaseHeight), s, 1));
#endif
    fStencilAtlasFlags &= ~OpFlags::kDisableHWTessellation;
    fMaxAtlasPathWidth = std::min(fMaxAtlasPathWidth, (int)worstCaseWidth);
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (shape.inverseFilled() || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective()) {
        return CanDrawPath::kNo;
    }

    if (GrAAType::kCoverage == args.fAAType) {
        SkASSERT(1 == args.fProxy->numSamples());
        if (!args.fProxy->canUseMixedSamples(*args.fCaps)) {
            return CanDrawPath::kNo;
        }
    }

    SkPath path;
    shape.asPath(&path);

    if (!shape.style().isSimpleFill()) {
        // These are only temporary restrictions while we bootstrap tessellated stroking. Every one
        // of them will eventually go away.
        if (shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
            SkPathPriv::ConicWeightCnt(path)) {
            return CanDrawPath::kNo;
        }
        if (shape.style().isSimpleHairline()) {
            // For the time being we transform hairline paths into device space. We can't do this
            // if it's possible the paint might use local coordinates.
            if (args.fPaint->usesVaryingCoords()) {
                return CanDrawPath::kNo;
            }
        }
    }

    return CanDrawPath::kYes;
}

static GrOp::Owner make_stroke_op(GrRecordingContext* context, GrAAType aaType,
                                  const SkMatrix& viewMatrix, const SkStrokeRec& stroke,
                                  const SkPath& path, GrPaint&& paint,
                                  const GrShaderCaps& shaderCaps) {
    // Only use hardware tessellation if the path has a somewhat large number of verbs. Otherwise
    // we seem to be better off using indirect draws. Our back door for HW tessellation shaders
    // isn't currently capable of passing varyings to the fragment shader either, so if the paint
    // uses varyings we need to use indirect draws.
    if (shaderCaps.tessellationSupport() && path.countVerbs() > 50 && !paint.usesVaryingCoords()) {
        return GrOp::Make<GrStrokeTessellateOp>(context, aaType, viewMatrix, stroke, path,
                                                std::move(paint));
    } else {
        return GrOp::Make<GrStrokeIndirectOp>(context, aaType, viewMatrix, path, stroke,
                                              std::move(paint));
    }
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrRenderTargetContext* renderTargetContext = args.fRenderTargetContext;
    const GrShaderCaps& shaderCaps = *args.fContext->priv().caps()->shaderCaps();

    SkPath path;
    args.fShape->asPath(&path);

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, path.getBounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (args.fShape->style().isSimpleFill() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path, devBounds,
                                args.fAAType, &devIBounds, &locationInAtlas, &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
#ifdef SK_DEBUG
        // If using hardware tessellation in the atlas, make sure the max number of segments is
        // sufficient for this path. fMaxAtlasPathWidth should have been tuned for this to always
        // be the case.
        if (!(fStencilAtlasFlags & OpFlags::kDisableHWTessellation)) {
            int worstCaseNumSegments = GrWangsFormula::worst_case_cubic(kLinearizationIntolerance,
                                                                        devIBounds.width(),
                                                                        devIBounds.height());
            SkASSERT(worstCaseNumSegments <= shaderCaps.maxTessellationSegments());
        }
#endif
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                renderTargetContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
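    // A resolve level of r corresponds to chopping each curve into as many as 2^r line segments
    // (note the "1 << worstCaseResolveLevel" comparison against maxTessellationSegments further
    // below), so kMaxResolveLevel bounds how finely the indirect draw shaders can subdivide.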
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(renderTargetContext->width(),
                                        renderTargetContext->height());
        float inflationRadius = 1;
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * args.fViewMatrix->getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(*args.fViewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return true because
            // drawing nothing is acceptable behavior for FP overflow.
            return true;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!args.fViewMatrix->invert(&inverse)) {
            return true;  // Singular view matrix. Nothing would have drawn anyway. Return true.
        }
        path.transform(inverse);
        path.setIsVolatile(true);
        args.fViewMatrix->mapRect(&devBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationIntolerance,
                                                                      devBounds.width(),
                                                                      devBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (args.fShape->style().isSimpleHairline()) {
        // Since we will be transforming the path, just double check that we are still in a
        // position where the paint will not use local coordinates.
        SkASSERT(!args.fPaint.usesVaryingCoords());
        // Pre-transform the path into device space and use a stroke width of 1.
        SkPath devPath;
        path.transform(*args.fViewMatrix, &devPath);
        SkStrokeRec devStroke = args.fShape->style().strokeRec();
        devStroke.setStrokeStyle(1);
        auto op = make_stroke_op(args.fContext, args.fAAType, SkMatrix::I(), devStroke, devPath,
                                 std::move(args.fPaint), shaderCaps);
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    if (!args.fShape->style().isSimpleFill()) {
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() == SkStrokeRec::kStroke_Style);
        auto op = make_stroke_op(args.fContext, args.fAAType, *args.fViewMatrix, stroke, path,
                                 std::move(args.fPaint), shaderCaps);
        renderTargetContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    auto drawPathFlags = OpFlags::kNone;
    if ((1 << worstCaseResolveLevel) > shaderCaps.maxTessellationSegments()) {
        // The path is too large for hardware tessellation; a curve in this bounding box could
        // potentially require more segments than are supported by the hardware. Fall back on
        // indirect draws.
        drawPathFlags |= OpFlags::kDisableHWTessellation;
    }

    auto op = GrOp::Make<GrPathTessellateOp>(
            args.fContext, *args.fViewMatrix, path, std::move(args.fPaint),
            args.fAAType, drawPathFlags);
    renderTargetContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const SkPath& path, const SkRect& devBounds,
        GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
        bool* transposedInAtlas) {
    if (!fMaxAtlasPathWidth) {
        return false;
    }

    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Atlas paths require their points to be transformed on the CPU and copied into an "uber
    // path". Check if this path has too many points to justify this extra work.
    if (path.countPoints() > 200) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends itself very well to
    // efficient pow2 atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for an atlas. Since we use "minDimension" for height in the
    // atlas, limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= kMaxAtlasPathHeight.
    if (maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
    if (*transposedInAtlas) {
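        // Swapping the upper 2x2 rows maps device (x,y) to (y,x); together with the translates
        // below, a point that would land at device (devX, devY) instead lands in the atlas at
        // (devY - devIBounds->y() + locationInAtlas->x(), devX - devIBounds->x() + locationInAtlas->y()).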
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    auto op = GrOp::Make<GrPathTessellateOp>(
            args.fContext, *args.fViewMatrix, path, GrPaint(), aaType, OpFlags::kStencilOnly);
    args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op));
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

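// Passes wherever the stencil value is nonzero and leaves the stencil buffer untouched.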
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

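// Passes wherever the stencil value is nonzero and resets the passing stencil values back to zero.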
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathTessellateOp>(onFlushRP->recordingContext(),
                    SkMatrix::I(), *uberPath, GrPaint(), aaType, fStencilAtlasFlags);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    auto aaType = GrAAType::kMSAA;
    auto fillRectFlags = GrFillRectOp::InputFlags::kNone;

    // This will be the final op in the renderTargetContext. So if Ganesh is planning to discard
    // the stencil values anyway, then we might not actually need to reset the stencil values back
    // to 0.
    bool mustResetStencil = !onFlushRP->caps()->discardStencilValuesAfterRenderPass();

    if (rtc->numSamples() == 1) {
        // We are mixed sampled. We need to either enable conservative raster (preferred) or
        // disable MSAA in order to avoid double blend artifacts. (Even if we disable MSAA for the
        // cover geometry, the stencil test is still multisampled and will still produce smooth
        // results.)
        if (onFlushRP->caps()->conservativeRasterSupport()) {
            fillRectFlags |= GrFillRectOp::InputFlags::kConservativeRaster;
        } else {
            aaType = GrAAType::kNone;
        }
        mustResetStencil = true;
    }

    SkRect coverRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());
    const GrUserStencilSettings* stencil;
    if (mustResetStencil) {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        coverRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    } else {
        stencil = &kTestStencil;
    }

    GrQuad coverQuad(coverRect);
    DrawQuad drawQuad{coverQuad, coverQuad, GrQuadAAFlags::kAll};

    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);

    auto coverOp = GrFillRectOp::Make(rtc->recordingContext(), std::move(paint), aaType, &drawQuad,
                                      stencil, fillRectFlags);
    rtc->addDrawOp(nullptr, std::move(coverOp));

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}