Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2019 Google LLC. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Chris Dalton | 0a22b1e | 2020-03-26 11:52:15 -0600 | [diff] [blame] | 8 | #include "src/gpu/tessellate/GrTessellationPathRenderer.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 9 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 10 | #include "include/private/SkVx.h" |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 11 | #include "src/core/SkIPoint16.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 12 | #include "src/core/SkPathPriv.h" |
| 13 | #include "src/gpu/GrClip.h" |
| 14 | #include "src/gpu/GrMemoryPool.h" |
| 15 | #include "src/gpu/GrRecordingContextPriv.h" |
Brian Salomon | eebe735 | 2020-12-09 16:37:04 -0500 | [diff] [blame] | 16 | #include "src/gpu/GrSurfaceDrawContext.h" |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 17 | #include "src/gpu/GrVx.h" |
Chris Dalton | 43a8b0c | 2021-06-14 17:10:07 -0600 | [diff] [blame^] | 18 | #include "src/gpu/effects/GrBlendFragmentProcessor.h" |
| 19 | #include "src/gpu/effects/generated/GrDeviceSpaceEffect.h" |
Michael Ludwig | 2686d69 | 2020-04-17 20:21:37 +0000 | [diff] [blame] | 20 | #include "src/gpu/geometry/GrStyledShape.h" |
Michael Ludwig | 4e9d5e2 | 2021-05-11 10:00:12 -0400 | [diff] [blame] | 21 | #include "src/gpu/geometry/GrWangsFormula.h" |
Chris Dalton | c3b67eb | 2020-02-10 21:09:58 -0700 | [diff] [blame] | 22 | #include "src/gpu/ops/GrFillRectOp.h" |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 23 | #include "src/gpu/tessellate/GrDrawAtlasPathOp.h" |
Chris Dalton | ebb37e7 | 2021-01-27 17:59:45 -0700 | [diff] [blame] | 24 | #include "src/gpu/tessellate/GrPathInnerTriangulateOp.h" |
Chris Dalton | 031d76b | 2021-06-08 16:32:00 -0600 | [diff] [blame] | 25 | #include "src/gpu/tessellate/GrPathStencilCoverOp.h" |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 26 | #include "src/gpu/tessellate/GrPathTessellateOp.h" |
Chris Dalton | 05007df | 2021-02-04 00:24:52 -0700 | [diff] [blame] | 27 | #include "src/gpu/tessellate/GrStrokeTessellateOp.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 28 | |
// The atlas starts at 512x512 and grows as needed, up to kMaxAtlasSize (or the caps' preferred
// render target size, whichever is smaller; see the constructor).
constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

// The atlas stores single-channel coverage, even when the final target is MSAA.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;
| 41 | |
Chris Dalton | 1413d11 | 2020-07-09 11:26:31 -0600 | [diff] [blame] | 42 | bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) { |
Chris Dalton | 8f282f5 | 2021-01-06 11:47:58 -0700 | [diff] [blame] | 43 | return !caps.avoidStencilBuffers() && |
| 44 | caps.drawInstancedSupport() && |
Chris Dalton | eae5c16 | 2020-12-29 10:18:21 -0700 | [diff] [blame] | 45 | caps.shaderCaps()->vertexIDSupport() && |
| 46 | !caps.disableTessellationPathRenderer(); |
Chris Dalton | 1413d11 | 2020-07-09 11:26:31 -0600 | [diff] [blame] | 47 | } |
| 48 | |
// Constructs the renderer and sizes its coverage atlas: initial size kAtlasInitialSize, maximum
// size min(kMaxAtlasSize, caps' preferred render target size).
GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    // fMaxAtlasPathWidth stays 0 (atlas disabled; see tryAddPathToAtlas) unless we are on a
    // direct context AND the atlas format supports internal multisampling.
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
        fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;  // Enable the atlas.
    }
}
| 60 | |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 61 | GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport( |
| 62 | const GrStyledShape& shape) const { |
| 63 | if (!shape.style().isSimpleFill()) { |
| 64 | // Don't bother with stroke stencilling yet. Skia probably shouldn't support this at all |
| 65 | // since you can't clip by a stroke. |
| 66 | return kNoSupport_StencilSupport; |
| 67 | } |
| 68 | return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport; |
| 69 | } |
| 70 | |
Chris Dalton | 0a22b1e | 2020-03-26 11:52:15 -0600 | [diff] [blame] | 71 | GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath( |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 72 | const CanDrawPathArgs& args) const { |
Chris Dalton | 1c62a7b | 2020-06-29 22:01:14 -0600 | [diff] [blame] | 73 | const GrStyledShape& shape = *args.fShape; |
Chris Dalton | 57ab06c | 2021-04-22 12:57:28 -0600 | [diff] [blame] | 74 | if (args.fAAType == GrAAType::kCoverage || |
| 75 | shape.style().hasPathEffect() || |
Chris Dalton | 06b52ad | 2020-12-15 10:01:35 -0700 | [diff] [blame] | 76 | args.fViewMatrix->hasPerspective() || |
| 77 | shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style || |
Chris Dalton | 2078cbe | 2020-12-14 19:04:55 -0700 | [diff] [blame] | 78 | shape.inverseFilled() || |
Chris Dalton | 537293bf | 2021-05-03 15:54:24 -0600 | [diff] [blame] | 79 | !args.fProxy->canUseStencil(*args.fCaps)) { |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 80 | return CanDrawPath::kNo; |
| 81 | } |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 82 | if (args.fHasUserStencilSettings) { |
| 83 | // Non-convex paths and strokes use the stencil buffer internally, so they can't support |
| 84 | // draws with stencil settings. |
| 85 | if (!shape.style().isSimpleFill() || !shape.knownToBeConvex()) { |
| 86 | return CanDrawPath::kNo; |
| 87 | } |
| 88 | } |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 89 | return CanDrawPath::kYes; |
| 90 | } |
| 91 | |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 92 | static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext, |
| 93 | GrTessellationPathRenderer::PathFlags pathFlags, |
| 94 | GrAAType aaType, const SkRect& pathDevBounds, |
| 95 | const SkMatrix& viewMatrix, const SkPath& path, |
| 96 | GrPaint&& paint) { |
| 97 | SkASSERT(!path.isConvex()); |
| 98 | int numVerbs = path.countVerbs(); |
| 99 | if (numVerbs > 0) { |
| 100 | // Check if the path is large and/or simple enough that we can triangulate the inner fan |
| 101 | // on the CPU. This is our fastest approach. It allows us to stencil only the curves, |
| 102 | // and then fill the inner fan directly to the final render target, thus drawing the |
| 103 | // majority of pixels in a single render pass. |
| 104 | float gpuFragmentWork = pathDevBounds.height() * pathDevBounds.width(); |
| 105 | float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs); // N log N. |
| 106 | constexpr static float kCpuWeight = 512; |
| 107 | constexpr static float kMinNumPixelsToTriangulate = 256 * 256; |
| 108 | if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) { |
| 109 | return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path, |
| 110 | std::move(paint), aaType, pathFlags, |
| 111 | pathDevBounds); |
Chris Dalton | 70a0d2c | 2021-01-26 12:01:21 -0700 | [diff] [blame] | 112 | } |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 113 | } |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 114 | return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType, |
| 115 | pathFlags, pathDevBounds); |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 116 | } |
| 117 | |
// Main draw entry point. Dispatch order: strokes first, then the coverage atlas, then direct
// convex tessellation, and finally the non-convex stencil/triangulate path.
bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;

    SkPath path;
    args.fShape->asPath(&path);

    // Handle strokes first.
    if (!args.fShape->style().isSimpleFill()) {
        SkASSERT(args.fUserStencilSettings->isUnused());
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
                                                   path, stroke, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // The atlas can't honor caller-supplied stencil settings, so only consider it when they are
    // unused.
    if (args.fUserStencilSettings->isUnused() &&
        this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, path,
                                pathDevBounds, args.fAAType != GrAAType::kNone, &devIBounds,
                                &locationInAtlas, &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext, surfaceDrawContext->numSamples(),
                                                sk_ref_sp(fAtlas.textureProxy()), devIBounds,
                                                locationInAtlas, transposedInAtlas,
                                                *args.fViewMatrix, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
    // an effort to reduce DMSAA triggers.
    if (args.fShape->knownToBeConvex()) {
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(args.fPaint), args.fAAType,
                                                 args.fUserStencilSettings, pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkASSERT(args.fUserStencilSettings->isUnused());  // See onGetStencilSupport().
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, pathDevBounds,
                                      *args.fViewMatrix, path, std::move(args.fPaint));
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
| 176 | |
// Stencils the path without writing any color (e.g. for clipping). Only simple fills reach this
// point; see onGetStencilSupport().
void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkASSERT(args.fShape->style().isSimpleFill());  // See onGetStencilSupport().

    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    SkPath path;
    args.fShape->asPath(&path);

    if (args.fShape->knownToBeConvex()) {
        // Convex paths can be tessellated directly: unconditionally replace the stencil value
        // with 0x0001 wherever the path covers, leaving everything else untouched.
        constexpr static GrUserStencilSettings kMarkStencil(
            GrUserStencilSettings::StaticInit<
                0x0001,
                GrUserStencilTest::kAlways,
                0xffff,
                GrUserStencilOp::kReplace,
                GrUserStencilOp::kKeep,
                0xffff>());

        // Disable color writes; this draw only marks the stencil buffer.
        GrPaint stencilPaint;
        stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(stencilPaint), aaType, &kMarkStencil,
                                                 pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return;
    }

    // Non-convex: use the stencil-only variant of the fill op with an empty paint.
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
                                      *args.fViewMatrix, path, GrPaint());
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
}
| 212 | |
// Produces a fragment processor that applies "path" as clip coverage by rendering it into the
// coverage atlas and sampling the atlas at sk_FragCoord. On failure (perspective matrix, or the
// path can't be atlased) the input coverage FP is handed back unmodified via GrFPFailure.
GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(
        const SkIRect& drawBounds, const SkMatrix& viewMatrix, const SkPath& path, GrAA aa,
        std::unique_ptr<GrFragmentProcessor> inputCoverage, const GrCaps& caps) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputCoverage));
    }
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
    if (!this->tryAddPathToAtlas(caps, viewMatrix, path, viewMatrix.mapRect(path.getBounds()),
                                 aa != GrAA::kNo, &devIBounds, &locationInAtlas,
                                 &transposedInAtlas)) {
        // The path is too big, or the atlas ran out of room.
        return GrFPFailure(std::move(inputCoverage));
    }
    GrSurfaceProxyView atlasView(sk_ref_sp(fAtlas.textureProxy()), GrDynamicAtlas::kTextureOrigin,
                                 caps.getReadSwizzle(fAtlas.textureProxy()->backendFormat(),
                                                     GrColorType::kAlpha_8));
    // Build the device-space -> atlas-space matrix, plus the subset/domain rects for the texture
    // lookup. If the entry was transposed, the matrix also swaps x and y.
    SkMatrix atlasMatrix;
    SkRect atlasSubset, atlasDomain;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        auto atlasOffset = SkVector::Make(atlasX - devIBounds.left(), atlasY - devIBounds.top());
        atlasMatrix = SkMatrix::Translate(atlasOffset);
        atlasSubset = SkRect::Make(devIBounds).makeOffset(atlasOffset);
        atlasDomain = SkRect::Make(drawBounds).makeOffset(atlasOffset);
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
        atlasSubset = SkRect::MakeXYWH(atlasX, atlasY, devIBounds.height(), devIBounds.width());
        atlasDomain = atlasMatrix.mapRect(SkRect::Make(drawBounds));
    }
#ifdef SK_DEBUG
    if (!path.isInverseFillType()) {
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to enforce the texture subset. Feel free to remove this assert if
        // that ever changes.
        SkASSERT(atlasDomain.isEmpty() || atlasSubset.contains(atlasDomain));
    }
#endif
    // Inset the domain because if it is equal to the subset, then it falls on an exact boundary
    // between pixels, the "nearest" filter becomes undefined, and GrTextureEffect is forced to
    // manually enforce the subset. This inset is justifiable because textures are sampled at pixel
    // center, unless sample shading is enabled, in which case we assume standard sample locations
    // (https://www.khronos.org/registry/vulkan/specs/1.2/html/chap25.html).
    // NOTE: At MSAA16, standard sample locations begin falling on actual pixel boundaries. If this
    // happens then we simply have to rely on the fact that the atlas has a 1px padding between
    // entries.
    constexpr static float kMinInsetOfStandardMSAA8Locations = 1/16.f;
    atlasDomain.inset(kMinInsetOfStandardMSAA8Locations, kMinInsetOfStandardMSAA8Locations);
    // Look up clip coverage in the atlas.
    GrSamplerState samplerState(GrSamplerState::WrapMode::kClampToBorder,
                                GrSamplerState::Filter::kNearest);
    auto fp = GrTextureEffect::MakeSubset(std::move(atlasView), kPremul_SkAlphaType, atlasMatrix,
                                          samplerState, atlasSubset, atlasDomain, caps);
    // Feed sk_FragCoord into the above texture lookup.
    fp = GrDeviceSpaceEffect::Make(std::move(fp));
    if (path.isInverseFillType()) {
        // outputCoverage = inputCoverage * (1 - atlasAlpha)
        fp = GrBlendFragmentProcessor::Make(
                std::move(fp), std::move(inputCoverage), SkBlendMode::kDstOut,
                GrBlendFragmentProcessor::BlendBehavior::kSkModeBehavior);
    } else {
        // outputCoverage = inputCoverage * atlasAlpha
        fp = GrBlendFragmentProcessor::Make(
                std::move(fp), std::move(inputCoverage), SkBlendMode::kDstIn,
                GrBlendFragmentProcessor::BlendBehavior::kSkModeBehavior);
    }
    return GrFPSuccess(std::move(fp));
}
| 285 | |
// Builds the cache key for an atlased path. Two draws may share an atlas entry only when they
// match on: path genID, fill rule, antialias flag, the 2x2 affine part of the matrix, and the
// subpixel (fractional) translation.
void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias,
                                                   const SkPath& path) {
    using grvx::float2;
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getSkewY();
    fAffineMatrix[3] = m.getScaleY();
    // Only the fractional part of the translation is keyed: quantize it by the tessellator's
    // linearization precision and pack the result into two uint8s.
    float2 translate = {m.getTranslateX(), m.getTranslateY()};
    float2 subpixelPosition = translate - skvx::floor(translate);
    float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
                                             GrPathTessellator::kLinearizationPrecision);
    skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
    fAntialias = antialias;
    fFillRule = (uint8_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
    fPathGenID = path.getGenerationID();
}
| 302 | |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 303 | bool GrTessellationPathRenderer::tryAddPathToAtlas(const GrCaps& caps, const SkMatrix& viewMatrix, |
| 304 | const SkPath& path, const SkRect& pathDevBounds, |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 305 | bool antialias, SkIRect* devIBounds, |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 306 | SkIPoint16* locationInAtlas, |
| 307 | bool* transposedInAtlas) { |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 308 | SkASSERT(!viewMatrix.hasPerspective()); // See onCanDrawPath(). |
| 309 | |
Chris Dalton | d72cb4c | 2020-07-16 17:50:17 -0600 | [diff] [blame] | 310 | if (!fMaxAtlasPathWidth) { |
| 311 | return false; |
| 312 | } |
| 313 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 314 | if (!caps.multisampleDisableSupport() && !antialias) { |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 315 | return false; |
| 316 | } |
| 317 | |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 318 | // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this |
| 319 | // guarantees that every atlas entry has a small height, which lends very well to efficient pow2 |
| 320 | // atlas packing. |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 321 | pathDevBounds.roundOut(devIBounds); |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 322 | int maxDimenstion = devIBounds->width(); |
| 323 | int minDimension = devIBounds->height(); |
| 324 | *transposedInAtlas = minDimension > maxDimenstion; |
| 325 | if (*transposedInAtlas) { |
| 326 | std::swap(minDimension, maxDimenstion); |
| 327 | } |
| 328 | |
Chris Dalton | 569c01b | 2021-05-25 10:11:46 -0600 | [diff] [blame] | 329 | // Check if the path is too large for an atlas. Since we transpose paths in the atlas so height |
| 330 | // is always "minDimension", limiting to kMaxAtlasPathHeight^2 pixels guarantees height <= |
| 331 | // kMaxAtlasPathHeight, while also allowing paths that are very wide and short. |
Chris Dalton | eae5c16 | 2020-12-29 10:18:21 -0700 | [diff] [blame] | 332 | if ((uint64_t)maxDimenstion * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight || |
Chris Dalton | b96995d | 2020-06-04 16:44:29 -0600 | [diff] [blame] | 333 | maxDimenstion > fMaxAtlasPathWidth) { |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 334 | return false; |
| 335 | } |
| 336 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 337 | // Check if this path is already in the atlas. This is mainly for clip paths. |
| 338 | AtlasPathKey atlasPathKey; |
| 339 | if (!path.isVolatile()) { |
| 340 | atlasPathKey.set(viewMatrix, antialias, path); |
| 341 | if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) { |
| 342 | *locationInAtlas = *existingLocation; |
| 343 | return true; |
| 344 | } |
| 345 | } |
| 346 | |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 347 | if (!fAtlas.addRect(maxDimenstion, minDimension, locationInAtlas)) { |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 348 | return false; |
| 349 | } |
| 350 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 351 | // Remember this path's location in the atlas, in case it gets drawn again. |
| 352 | if (!path.isVolatile()) { |
| 353 | fAtlasPathCache.set(atlasPathKey, *locationInAtlas); |
| 354 | } |
| 355 | |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 356 | SkMatrix atlasMatrix = viewMatrix; |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 357 | if (*transposedInAtlas) { |
| 358 | std::swap(atlasMatrix[0], atlasMatrix[3]); |
| 359 | std::swap(atlasMatrix[1], atlasMatrix[4]); |
| 360 | float tx=atlasMatrix.getTranslateX(), ty=atlasMatrix.getTranslateY(); |
| 361 | atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x()); |
| 362 | atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y()); |
| 363 | } else { |
| 364 | atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(), |
| 365 | locationInAtlas->y() - devIBounds->y()); |
| 366 | } |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 367 | |
| 368 | // Concatenate this path onto our uber path that matches its fill and AA types. |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 369 | SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), antialias); |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 370 | uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y()); // Implicit moveTo(0,0). |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 371 | uberPath->addPath(path, atlasMatrix); |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 372 | return true; |
| 373 | } |
| 374 | |
Chris Dalton | 0a22b1e | 2020-03-26 11:52:15 -0600 | [diff] [blame] | 375 | void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP, |
Adlai Holler | 9902cff | 2020-11-11 08:51:25 -0500 | [diff] [blame] | 376 | SkSpan<const uint32_t> /* taskIDs */) { |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 377 | if (!fAtlas.drawBounds().isEmpty()) { |
| 378 | this->renderAtlas(onFlushRP); |
| 379 | fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps()); |
| 380 | } |
| 381 | for (SkPath& path : fAtlasUberPaths) { |
| 382 | path.reset(); |
| 383 | } |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 384 | fAtlasPathCache.reset(); |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 385 | } |
| 386 | |
// Stencil programs for the atlas cover pass (see renderAtlas()).
// "Test": pass wherever the stencil value is nonzero; leave the stencil contents untouched.
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

// "Test and reset": pass wherever the stencil value is nonzero, and zero it back out so the
// stencil buffer is clean afterward.
constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());
| 404 | |
// Renders the accumulated uber paths into the atlas: first stencil every path, then draw one
// fullscreen rect that converts the stencil values into alpha8 coverage.
void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    SkRect atlasRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());

    // Add ops to stencil the atlas paths. There is one uber path per (antialias, fill type)
    // combination; see getAtlasUberPath().
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathStencilCoverOp>(onFlushRP->recordingContext(), SkMatrix::I(),
                                                       *uberPath, GrPaint(), aaType,
                                                       PathFlags::kStencilOnly, atlasRect);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);
    const GrUserStencilSettings* stencil;
    if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
        // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
        // stencil values anyway, there is no need to reset the stencil values back to 0.
        stencil = &kTestStencil;
    } else {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        atlasRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    }
    rtc->stencilRect(nullptr, stencil, std::move(paint), GrAA::kYes, SkMatrix::I(), atlasRect);

    // If the atlas target needs a manual MSAA resolve, schedule it now so the coverage texture is
    // ready for samplers.
    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}