Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2019 Google LLC. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Chris Dalton | 0a22b1e | 2020-03-26 11:52:15 -0600 | [diff] [blame] | 8 | #include "src/gpu/tessellate/GrTessellationPathRenderer.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 9 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 10 | #include "include/private/SkVx.h" |
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 11 | #include "src/core/SkIPoint16.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 12 | #include "src/core/SkPathPriv.h" |
| 13 | #include "src/gpu/GrClip.h" |
| 14 | #include "src/gpu/GrMemoryPool.h" |
| 15 | #include "src/gpu/GrRecordingContextPriv.h" |
Brian Salomon | eebe735 | 2020-12-09 16:37:04 -0500 | [diff] [blame] | 16 | #include "src/gpu/GrSurfaceDrawContext.h" |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 17 | #include "src/gpu/GrVx.h" |
Robert Phillips | 550de7f | 2021-07-06 16:28:52 -0400 | [diff] [blame] | 18 | #include "src/gpu/effects/GrDisableColorXP.h" |
Michael Ludwig | 2686d69 | 2020-04-17 20:21:37 +0000 | [diff] [blame] | 19 | #include "src/gpu/geometry/GrStyledShape.h" |
Chris Dalton | 83420eb | 2021-06-23 18:47:09 -0600 | [diff] [blame] | 20 | #include "src/gpu/tessellate/GrAtlasRenderTask.h" |
Chris Dalton | 4e99853 | 2020-02-10 11:06:42 -0700 | [diff] [blame] | 21 | #include "src/gpu/tessellate/GrDrawAtlasPathOp.h" |
Chris Dalton | ebb37e7 | 2021-01-27 17:59:45 -0700 | [diff] [blame] | 22 | #include "src/gpu/tessellate/GrPathInnerTriangulateOp.h" |
Chris Dalton | 031d76b | 2021-06-08 16:32:00 -0600 | [diff] [blame] | 23 | #include "src/gpu/tessellate/GrPathStencilCoverOp.h" |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 24 | #include "src/gpu/tessellate/GrPathTessellateOp.h" |
Chris Dalton | 05007df | 2021-02-04 00:24:52 -0700 | [diff] [blame] | 25 | #include "src/gpu/tessellate/GrStrokeTessellateOp.h" |
Chris Dalton | abed267 | 2021-06-17 16:54:28 -0600 | [diff] [blame] | 26 | #include "src/gpu/tessellate/shaders/GrModulateAtlasCoverageFP.h" |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 27 | |
// Color type for the coverage atlas: single-channel alpha8 is all that coverage needs.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
// Initial atlas backing-store dimension (pixels); the atlas may grow up to fAtlasMaxSize.
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 128;
Chris Dalton | d2dc8dd | 2020-05-19 16:32:02 -0600 | [diff] [blame] | 38 | |
Chris Dalton | 1413d11 | 2020-07-09 11:26:31 -0600 | [diff] [blame] | 39 | bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) { |
Chris Dalton | 8f282f5 | 2021-01-06 11:47:58 -0700 | [diff] [blame] | 40 | return !caps.avoidStencilBuffers() && |
| 41 | caps.drawInstancedSupport() && |
Chris Dalton | 3febc61 | 2021-07-14 13:47:07 -0600 | [diff] [blame] | 42 | caps.shaderCaps()->infinitySupport() && |
Chris Dalton | eae5c16 | 2020-12-29 10:18:21 -0700 | [diff] [blame] | 43 | !caps.disableTessellationPathRenderer(); |
Chris Dalton | 1413d11 | 2020-07-09 11:26:31 -0600 | [diff] [blame] | 44 | } |
| 45 | |
// Constructor: configures the coverage atlas sizes. The atlas is only enabled (i.e. the fAtlas*
// fields are only set) when we have a direct context AND the alpha8 format supports internal
// multisampling; otherwise the size fields keep their default values and the atlas path is
// effectively unavailable. NOTE(review): default values come from the header — not visible here.
GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
#if GR_TEST_UTILS
        // Tests can override the max atlas size through the context options.
        fAtlasMaxSize = rContext->priv().options().fMaxTextureAtlasSize;
#else
        fAtlasMaxSize = 2048;
#endif
        // Clamp to the preferred render target size, then snap: max size rounds DOWN to a pow2,
        // initial size rounds UP to a pow2 (but never above the max).
        fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, caps.maxPreferredRenderTargetSize()));
        fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, fAtlasMaxSize));
    }
}
| 60 | |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 61 | GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport( |
| 62 | const GrStyledShape& shape) const { |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 63 | if (!shape.style().isSimpleFill() || shape.inverseFilled()) { |
| 64 | // Don't bother with stroke stencilling or inverse fills yet. The Skia API doesn't support |
| 65 | // clipping by a stroke, and the stencilling code already knows how to invert a fill. |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 66 | return kNoSupport_StencilSupport; |
| 67 | } |
| 68 | return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport; |
| 69 | } |
| 70 | |
Chris Dalton | 0a22b1e | 2020-03-26 11:52:15 -0600 | [diff] [blame] | 71 | GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath( |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 72 | const CanDrawPathArgs& args) const { |
Chris Dalton | 1c62a7b | 2020-06-29 22:01:14 -0600 | [diff] [blame] | 73 | const GrStyledShape& shape = *args.fShape; |
Chris Dalton | 57ab06c | 2021-04-22 12:57:28 -0600 | [diff] [blame] | 74 | if (args.fAAType == GrAAType::kCoverage || |
| 75 | shape.style().hasPathEffect() || |
Chris Dalton | 06b52ad | 2020-12-15 10:01:35 -0700 | [diff] [blame] | 76 | args.fViewMatrix->hasPerspective() || |
| 77 | shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style || |
Chris Dalton | 537293bf | 2021-05-03 15:54:24 -0600 | [diff] [blame] | 78 | !args.fProxy->canUseStencil(*args.fCaps)) { |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 79 | return CanDrawPath::kNo; |
| 80 | } |
Chris Dalton | a05ccc3 | 2021-06-29 19:42:13 -0600 | [diff] [blame] | 81 | if (!shape.style().isSimpleFill()) { |
Chris Dalton | bb995e6 | 2021-07-01 10:58:55 -0600 | [diff] [blame] | 82 | if (shape.inverseFilled()) { |
Chris Dalton | a05ccc3 | 2021-06-29 19:42:13 -0600 | [diff] [blame] | 83 | return CanDrawPath::kNo; |
| 84 | } |
| 85 | } |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 86 | if (args.fHasUserStencilSettings) { |
| 87 | // Non-convex paths and strokes use the stencil buffer internally, so they can't support |
| 88 | // draws with stencil settings. |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 89 | if (!shape.style().isSimpleFill() || !shape.knownToBeConvex() || shape.inverseFilled()) { |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 90 | return CanDrawPath::kNo; |
| 91 | } |
| 92 | } |
Chris Dalton | b832ce6 | 2020-01-06 19:49:37 -0700 | [diff] [blame] | 93 | return CanDrawPath::kYes; |
| 94 | } |
| 95 | |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 96 | static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext, |
| 97 | GrTessellationPathRenderer::PathFlags pathFlags, |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 98 | GrAAType aaType, const SkRect& drawBounds, |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 99 | const SkMatrix& viewMatrix, const SkPath& path, |
| 100 | GrPaint&& paint) { |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 101 | SkASSERT(!path.isConvex() || path.isInverseFillType()); |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 102 | int numVerbs = path.countVerbs(); |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 103 | if (numVerbs > 0 && !path.isInverseFillType()) { |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 104 | // Check if the path is large and/or simple enough that we can triangulate the inner fan |
| 105 | // on the CPU. This is our fastest approach. It allows us to stencil only the curves, |
| 106 | // and then fill the inner fan directly to the final render target, thus drawing the |
| 107 | // majority of pixels in a single render pass. |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 108 | float gpuFragmentWork = drawBounds.height() * drawBounds.width(); |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 109 | float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs); // N log N. |
| 110 | constexpr static float kCpuWeight = 512; |
| 111 | constexpr static float kMinNumPixelsToTriangulate = 256 * 256; |
| 112 | if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) { |
| 113 | return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path, |
| 114 | std::move(paint), aaType, pathFlags, |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 115 | drawBounds); |
Chris Dalton | 70a0d2c | 2021-01-26 12:01:21 -0700 | [diff] [blame] | 116 | } |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 117 | } |
Chris Dalton | 7ae272f | 2021-06-10 11:45:14 -0600 | [diff] [blame] | 118 | return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType, |
Chris Dalton | baae2dd | 2021-06-25 14:52:49 -0600 | [diff] [blame] | 119 | pathFlags, drawBounds); |
Chris Dalton | c2a1746 | 2020-12-09 16:46:22 -0700 | [diff] [blame] | 120 | } |
| 121 | |
// Main draw entry point. Dispatches to one of four strategies, in priority order:
//   1. strokes  -> GrStrokeTessellateOp
//   2. empty device bounds -> drawPaint (inverse fill) or no-op
//   3. small simple fills -> coverage atlas + GrDrawAtlasPathOp
//   4. convex fills -> GrPathTessellateOp; everything else -> make_non_convex_fill_op()
// Always returns true: onCanDrawPath() already guaranteed we can handle this draw.
bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;

    SkPath path;
    args.fShape->asPath(&path);

    // Handle strokes first.
    if (!args.fShape->style().isSimpleFill()) {
        SkASSERT(!path.isInverseFillType());  // See onGetStencilSupport().
        SkASSERT(args.fUserStencilSettings->isUnused());
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
                                                   path, stroke, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    if (pathDevBounds.isEmpty()) {
        // An empty inverse fill covers the entire clip; an empty regular fill covers nothing.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    if (args.fUserStencilSettings->isUnused()) {
        // See if the path is small and simple enough to atlas instead of drawing directly.
        //
        // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
        // render the sample mask to an integer texture, but such a scheme would probably require
        // GL_EXT_post_depth_coverage, which appears to have low adoption.
        SkIRect devIBounds;
        SkIPoint16 locationInAtlas;
        bool transposedInAtlas;
        // Lets tryAddPathToAtlas() check whether this draw already reads the current atlas
        // (in which case the atlas must not be replaced mid-draw).
        auto visitProxiesUsedByDraw = [&args](GrVisitProxyFunc visitor) {
            if (args.fPaint.hasColorFragmentProcessor()) {
                args.fPaint.getColorFragmentProcessor()->visitProxies(visitor);
            }
            if (args.fPaint.hasCoverageFragmentProcessor()) {
                args.fPaint.getCoverageFragmentProcessor()->visitProxies(visitor);
            }
        };
        if (this->tryAddPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                    args.fAAType != GrAAType::kNone, &devIBounds, &locationInAtlas,
                                    &transposedInAtlas, visitProxiesUsedByDraw)) {
            const GrCaps& caps = *args.fSurfaceDrawContext->caps();
            // For inverse fills the op must cover everything outside the path too: use the clip's
            // conservative bounds (or the whole backing store if unclipped) instead of devIBounds.
            const SkIRect& fillBounds = path.isInverseFillType()
                    ? (args.fClip
                            ? args.fClip->getConservativeBounds()
                            : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
                    : devIBounds;
            auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                                                    args.fSurfaceDrawContext->arenaAlloc(),
                                                    fillBounds, *args.fViewMatrix,
                                                    std::move(args.fPaint), locationInAtlas,
                                                    devIBounds, transposedInAtlas,
                                                    fAtlasRenderTasks.back()->readView(caps),
                                                    path.isInverseFillType());
            surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
            return true;
        }
    }

    // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
    // an effort to reduce DMSAA triggers.
    if (args.fShape->knownToBeConvex() && !path.isInverseFillType()) {
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(args.fPaint), args.fAAType,
                                                 args.fUserStencilSettings, pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkASSERT(args.fUserStencilSettings->isUnused());  // See onGetStencilSupport().
    // Inverse fills draw the whole backing store; regular fills only the path's device bounds.
    const SkRect& drawBounds = path.isInverseFillType()
            ? args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsRect()
            : pathDevBounds;
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, drawBounds,
                                      *args.fViewMatrix, path, std::move(args.fPaint));
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
| 206 | |
// Renders the path's coverage into the stencil buffer only (color writes disabled). Convex
// fills use a single tessellated "mark" pass; everything else goes through the non-convex
// stencil-only path. Preconditions (enforced by onGetStencilSupport()): simple fill, not inverse.
void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkASSERT(args.fShape->style().isSimpleFill());  // See onGetStencilSupport().
    SkASSERT(!args.fShape->inverseFilled());  // See onGetStencilSupport().

    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    SkPath path;
    args.fShape->asPath(&path);

    if (args.fShape->knownToBeConvex()) {
        // Convex: unconditionally replace the stencil value everywhere the path covers.
        constexpr static GrUserStencilSettings kMarkStencil(
            GrUserStencilSettings::StaticInit<
                0x0001,
                GrUserStencilTest::kAlways,
                0xffff,
                GrUserStencilOp::kReplace,
                GrUserStencilOp::kKeep,
                0xffff>());

        GrPaint stencilPaint;
        stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());  // Stencil only; no color.
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(stencilPaint), aaType, &kMarkStencil,
                                                 pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return;
    }

    // Non-convex: reuse the fill-op factory in stencil-only mode with an empty paint.
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
                                      *args.fViewMatrix, path, GrPaint());
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
}
| 243 | |
// Tries to render the given clip path into the coverage atlas. On success, returns a
// GrModulateAtlasCoverageFP that multiplies inputFP by the path's atlased coverage. On failure
// (perspective, empty non-inverse path, path too big, or atlas full), returns inputFP unchanged
// via GrFPFailure so the caller can fall back to another clipping method.
GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(GrRecordingContext* rContext,
                                                       const GrOp* opBeingClipped,
                                                       std::unique_ptr<GrFragmentProcessor> inputFP,
                                                       const SkIRect& drawBounds,
                                                       const SkMatrix& viewMatrix,
                                                       const SkPath& path, GrAA aa) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }
    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (pathDevBounds.isEmpty()) {
        // An empty inverse-filled clip covers everything (success, coverage unchanged); an empty
        // regular clip covers nothing (failure — nothing can draw).
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // The atlas must not be swapped out from under a draw that already reads it; let
    // tryAddPathToAtlas() inspect every proxy the clipped op and inputFP reference.
    auto visitProxiesUsedByDraw = [&opBeingClipped, &inputFP](GrVisitProxyFunc visitor) {
        opBeingClipped->visitProxies(visitor);
        if (inputFP) {
            inputFP->visitProxies(visitor);
        }
    };
    // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
    if (!this->tryAddPathToAtlas(rContext, viewMatrix, path, pathDevBounds, aa != GrAA::kNo,
                                 &devIBounds, &locationInAtlas, &transposedInAtlas,
                                 visitProxiesUsedByDraw)) {
        // The path is too big, or the atlas ran out of room.
        return GrFPFailure(std::move(inputFP));
    }
    // Build the device-space -> atlas-space matrix. If the path was stored transposed, swap the
    // x/y axes while translating into the atlas slot.
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageFP::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageFP::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        // The draw extends beyond the atlased region, so the FP must bounds-check its samples.
        flags |= GrModulateAtlasCoverageFP::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*rContext->priv().caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageFP>(flags, std::move(inputFP),
                                                                   std::move(atlasView),
                                                                   atlasMatrix, devIBounds));
}
| 299 | |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 300 | void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias, |
| 301 | const SkPath& path) { |
| 302 | using grvx::float2; |
| 303 | fAffineMatrix[0] = m.getScaleX(); |
| 304 | fAffineMatrix[1] = m.getSkewX(); |
| 305 | fAffineMatrix[2] = m.getSkewY(); |
| 306 | fAffineMatrix[3] = m.getScaleY(); |
| 307 | float2 translate = {m.getTranslateX(), m.getTranslateY()}; |
| 308 | float2 subpixelPosition = translate - skvx::floor(translate); |
Robert Phillips | 62214f7 | 2021-06-15 10:12:51 -0400 | [diff] [blame] | 309 | float2 subpixelPositionKey = skvx::trunc(subpixelPosition * |
Chris Dalton | e1f7237 | 2021-06-29 16:45:49 -0600 | [diff] [blame] | 310 | GrTessellationShader::kLinearizationPrecision); |
Chris Dalton | 50c3c24 | 2021-06-14 16:32:35 -0600 | [diff] [blame] | 311 | skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey); |
| 312 | fAntialias = antialias; |
| 313 | fFillRule = (uint8_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID. |
| 314 | fPathGenID = path.getGenerationID(); |
| 315 | } |
| 316 | |
// Attempts to reserve space for (and schedule the rendering of) 'path' in the coverage atlas.
// On success, fills in *devIBounds (device-space integer bounds), *locationInAtlas (top-left of
// the atlas slot), and *transposedInAtlas (whether width/height were swapped for packing), and
// returns true. Returns false if the path is too large, NaN-bounded, AA-incompatible, or the
// draw already reads the current (full) atlas. 'visitProxiesUsedByDraw' lets us detect that
// last case before replacing the atlas.
bool GrTessellationPathRenderer::tryAddPathToAtlas(GrRecordingContext* rContext,
                                                   const SkMatrix& viewMatrix, const SkPath& path,
                                                   const SkRect& pathDevBounds, bool antialias,
                                                   SkIRect* devIBounds, SkIPoint16* locationInAtlas,
                                                   bool* transposedInAtlas,
                                                   const VisitProxiesFn& visitProxiesUsedByDraw) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    // Write as the NOT of positive logic, so we will return false if any values are NaN.
    if (!(pathDevBounds.width() > 0 && pathDevBounds.width() <= fAtlasMaxSize) ||
        !(pathDevBounds.height() > 0 && pathDevBounds.height() <= fAtlasMaxSize)) {
        return false;
    }

    // The atlas is not compatible with DDL. We should only be using it on direct contexts.
    SkASSERT(rContext->asDirectContext());

    const GrCaps& caps = *rContext->priv().caps();
    // The atlas render pass needs to turn MSAA off for non-AA paths; bail if the backend can't.
    if (!caps.multisampleDisableSupport() && !antialias) {
        return false;
    }

    pathDevBounds.roundOut(devIBounds);
    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // Degenerate integer bounds can still occur after roundOut; reject them.
    if (widthInAtlas <= 0 || heightInAtlas <= 0) {
        return false;
    }

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
        // for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }

    // Check if the path is too large for an atlas. Since we transpose tall skinny paths, limiting
    // to kAtlasMaxPathHeight^2 pixels guarantees heightInAtlas <= kAtlasMaxPathHeight, while also
    // allowing paths that are very wide and short.
    if ((uint64_t)widthInAtlas * heightInAtlas > kAtlasMaxPathHeight * kAtlasMaxPathHeight ||
        widthInAtlas > fAtlasMaxSize) {
        return false;
    }
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, antialias, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
                                           widthInAtlas, heightInAtlas, *transposedInAtlas,
                                           locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask) {
            // Don't allow the current atlas to be replaced if the draw already uses it. Otherwise
            // the draw would use two different atlases, which breaks our guarantee that there will
            // only ever be one atlas active at a time.
            const GrSurfaceProxy* currentAtlasProxy = currentAtlasTask->atlasProxy();
            bool drawUsesCurrentAtlas = false;
            visitProxiesUsedByDraw([currentAtlasProxy, &drawUsesCurrentAtlas](GrSurfaceProxy* proxy,
                                                                             GrMipmapped) {
                if (proxy == currentAtlasProxy) {
                    drawUsesCurrentAtlas = true;
                }
            });
            if (drawUsesCurrentAtlas) {
                // The draw already uses the current atlas. Give up.
                return false;
            }
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        // A brand-new (initial-size) atlas must be able to hold any path that passed the size
        // checks above, so this add cannot fail.
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
                                             widthInAtlas, heightInAtlas, *transposedInAtlas,
                                             locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        // Cached locations pointed into the old atlas; they are no longer valid.
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}
| 424 | |
#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int idx = 1; idx < atlasTasks.count(); ++idx) {
        GrAtlasRenderTask* currentTask = atlasTasks[idx].get();
        GrAtlasRenderTask* priorTask = atlasTasks[idx - 1].get();
        // Double check that currentTask depends on every dependent of its previous atlas. If this
        // fires it might mean priorTask gained a new dependent after currentTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* userOfPriorAtlas : priorTask->dependents()) {
            SkASSERT(currentTask->dependsOn(userOfPriorAtlas));
        }
    }
}
#endif
| 443 | |
// Flush-time hook: instantiates the backing textures for all pending atlas render tasks (sharing
// one texture where dimensions match, since only one atlas is ever active at a time), then clears
// all per-flush atlas state.
void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            // Same dimensions: reuse the first atlas's texture.
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}