/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/geometry/GrWangsFormula.h"
#include "src/gpu/ops/GrFillRectOp.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
#include "src/gpu/tessellate/GrPathStencilFillOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"

constexpr static SkISize kAtlasInitialSize{512, 512};
constexpr static int kMaxAtlasSize = 2048;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, every path will have a small height,
// which lends itself well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kMaxAtlasPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return !caps.avoidStencilBuffers() &&
           caps.drawInstancedSupport() &&
           caps.shaderCaps()->vertexIDSupport() &&
           !caps.disableTessellationPathRenderer();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext)
        : fAtlas(kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes, kAtlasInitialSize,
                 std::min(kMaxAtlasSize, rContext->priv().caps()->maxPreferredRenderTargetSize()),
                 *rContext->priv().caps(), kAtlasAlgorithm) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
        fMaxAtlasPathWidth = fAtlas.maxAtlasSize() / 2;  // Enable the atlas.
    }
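    // Otherwise fMaxAtlasPathWidth is left at zero, which tryAddPathToAtlas() treats as "atlas
    // disabled". (This assumes the member default-initializes to 0 in the header.)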
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (args.fAAType == GrAAType::kCoverage ||
        shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() ||
        shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
        shape.inverseFilled() ||
        args.fHasUserStencilSettings ||
        !args.fProxy->canUseStencil(*args.fCaps)) {
        return CanDrawPath::kNo;
    }
    if (shape.style().strokeRec().getStyle() != SkStrokeRec::kStroke_Style) {
        // On platforms that don't have native support for indirect draws and/or hardware
        // tessellation, we find that the default path renderer can sometimes draw fills faster.
        // Let fills fall through to the default renderer on these platforms for now.
        // (crbug.com/1163441, skbug.com/11138, skbug.com/11139)
        if (!args.fCaps->nativeDrawIndirectSupport() &&
            !args.fCaps->shaderCaps()->tessellationSupport() &&
            // Is the path cacheable? TODO: This check is outdated. Remove it next.
            shape.hasUnstyledKey()) {
            return CanDrawPath::kNo;
        }
    }
    return CanDrawPath::kYes;
}

static GrOp::Owner make_op(GrRecordingContext* rContext, const GrSurfaceContext* surfaceContext,
                           GrTessellationPathRenderer::OpFlags opFlags, GrAAType aaType,
                           const SkRect& shapeDevBounds, const SkMatrix& viewMatrix,
                           const GrStyledShape& shape, GrPaint&& paint) {
    constexpr static auto kLinearizationPrecision =
            GrTessellationPathRenderer::kLinearizationPrecision;
    constexpr static auto kMaxResolveLevel = GrTessellationPathRenderer::kMaxResolveLevel;
    SkPath path;
    shape.asPath(&path);

    // Find the worst-case log2 number of line segments that a curve in this path might need to be
    // divided into.
    int worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      shapeDevBounds.width(),
                                                                      shapeDevBounds.height());
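    // (Wang's formula bounds the segment count needed for a cubic to stay within 1/precision of
    // the true curve by sqrt((3*precision/4) * max(||P0 - 2*P1 + P2||, ||P1 - 2*P2 + P3||));
    // worst_case_cubic_log2 takes the log2 of that bound for the worst cubic that fits in the
    // given width and height.)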
    if (worstCaseResolveLevel > kMaxResolveLevel) {
        // The path is too large for our internal indirect draw shaders. Crop it to the viewport.
        auto viewport = SkRect::MakeIWH(surfaceContext->width(), surfaceContext->height());
        float inflationRadius = 1;
        const SkStrokeRec& stroke = shape.style().strokeRec();
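        // Hairlines are always 1px wide in device space, so their inflation radius does not scale
        // with the view matrix; other stroke styles report inflation in local space, so theirs is
        // scaled by the matrix's max scale factor.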
        if (stroke.getStyle() == SkStrokeRec::kHairline_Style) {
            inflationRadius += SkStrokeRec::GetInflationRadius(stroke.getJoin(), stroke.getMiter(),
                                                               stroke.getCap(), 1);
        } else if (stroke.getStyle() != SkStrokeRec::kFill_Style) {
            inflationRadius += stroke.getInflationRadius() * viewMatrix.getMaxScale();
        }
        viewport.outset(inflationRadius, inflationRadius);

        SkPath viewportPath;
        viewportPath.addRect(viewport);
        // Perform the crop in device space so it's a simple rect-path intersection.
        path.transform(viewMatrix);
        if (!Op(viewportPath, path, kIntersect_SkPathOp, &path)) {
            // The crop can fail if the PathOps encounter NaN or infinities. Return null because
            // drawing nothing is acceptable behavior for FP overflow.
            return nullptr;
        }

        // Transform the path back to its own local space.
        SkMatrix inverse;
        if (!viewMatrix.invert(&inverse)) {
            return nullptr;  // Singular view matrix. Nothing would have drawn anyway. Return null.
        }
        path.transform(inverse);
        path.setIsVolatile(true);
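        // (Marking the cropped copy volatile hints that its geometry shouldn't be cached; it only
        // exists for this one draw.)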

        SkRect newDevBounds;
        viewMatrix.mapRect(&newDevBounds, path.getBounds());
        worstCaseResolveLevel = GrWangsFormula::worst_case_cubic_log2(kLinearizationPrecision,
                                                                      newDevBounds.width(),
                                                                      newDevBounds.height());
        // kMaxResolveLevel should be large enough to tessellate paths the size of any screen we
        // might encounter.
        SkASSERT(worstCaseResolveLevel <= kMaxResolveLevel);
    }

    if (!shape.style().isSimpleFill()) {
        const SkStrokeRec& stroke = shape.style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        return GrOp::Make<GrStrokeTessellateOp>(rContext, aaType, viewMatrix, path, stroke,
                                                std::move(paint));
    } else {
        SkRect devBounds;
        viewMatrix.mapRect(&devBounds, path.getBounds());
        int numVerbs = path.countVerbs();
        if (numVerbs > 0) {
            // Check if the path is large and/or simple enough that we can triangulate the inner
            // fan on the CPU. This is our fastest approach. It allows us to stencil only the
            // curves, and then fill the inner fan directly to the final render target, thus
            // drawing the majority of pixels in a single render pass.
            float gpuFragmentWork = devBounds.height() * devBounds.width();
            float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs);  // N log N.
            constexpr static float kCpuWeight = 512;
            constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
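            // (For example, a 100-verb path covering 500x500px: 100*7*512 + 256*256 is about 424k,
            // which is not less than 250k pixels, so it takes the stencil-fill op below; the same
            // path covering 1000x1000px would be triangulated on the CPU instead.)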
            if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
                return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
                                                            std::move(paint), aaType, opFlags,
                                                            devBounds);
            }
        }
        return GrOp::Make<GrPathStencilFillOp>(rContext, viewMatrix, path, std::move(paint), aaType,
                                               opFlags, devBounds);
    }
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;

    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());

    // See if the path is small and simple enough to atlas instead of drawing directly.
    //
    // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
    // render the sample mask to an integer texture, but such a scheme would probably require
    // GL_EXT_post_depth_coverage, which appears to have low adoption.
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    if (this->tryAddPathToAtlas(*args.fContext->priv().caps(), *args.fViewMatrix, *args.fShape,
                                devBounds, args.fAAType, &devIBounds, &locationInAtlas,
                                &transposedInAtlas)) {
        // The atlas is not compatible with DDL. We should only be using it on direct contexts.
        SkASSERT(args.fContext->asDirectContext());
        auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                surfaceDrawContext->numSamples(), sk_ref_sp(fAtlas.textureProxy()),
                devIBounds, locationInAtlas, transposedInAtlas, *args.fViewMatrix,
                std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kNone, args.fAAType,
                          devBounds, *args.fViewMatrix, *args.fShape, std::move(args.fPaint))) {
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    }
    return true;
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(
        const GrCaps& caps, const SkMatrix& viewMatrix, const GrStyledShape& shape,
        const SkRect& devBounds, GrAAType aaType, SkIRect* devIBounds, SkIPoint16* locationInAtlas,
        bool* transposedInAtlas) {
    if (!shape.style().isSimpleFill()) {
        return false;
    }

    if (!fMaxAtlasPathWidth) {
        return false;
    }

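    // renderAtlas() draws the atlas's non-AA uber paths with GrAAType::kNone into the multisampled
    // atlas target, which requires the ability to disable MSAA.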
    if (!caps.multisampleDisableSupport() && GrAAType::kNone == aaType) {
        return false;
    }

    // Transpose tall paths in the atlas. Since we limit ourselves to small-area paths, this
    // guarantees that every atlas entry has a small height, which lends itself well to efficient
    // pow2 atlas packing.
    devBounds.roundOut(devIBounds);
    int maxDimension = devIBounds->width();
    int minDimension = devIBounds->height();
    *transposedInAtlas = minDimension > maxDimension;
    if (*transposedInAtlas) {
        std::swap(minDimension, maxDimension);
    }

    // Check if the path is too large for the atlas. Since paths are transposed so that height is
    // always "minDimension", limiting the area to kMaxAtlasPathHeight^2 pixels guarantees
    // height <= kMaxAtlasPathHeight, while still allowing paths that are very wide and short.
    if ((uint64_t)maxDimension * minDimension > kMaxAtlasPathHeight * kMaxAtlasPathHeight ||
        maxDimension > fMaxAtlasPathWidth) {
        return false;
    }

    if (!fAtlas.addRect(maxDimension, minDimension, locationInAtlas)) {
        return false;
    }

    SkMatrix atlasMatrix = viewMatrix;
    if (*transposedInAtlas) {
        std::swap(atlasMatrix[0], atlasMatrix[3]);
        std::swap(atlasMatrix[1], atlasMatrix[4]);
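        // Swapping [scaleX, skewX] with [skewY, scaleY] post-composes a transpose (the x and y
        // device coordinates trade places); the translate values below are then swapped and offset
        // to match, so the path's device-space top-left corner lands at locationInAtlas.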
        float tx = atlasMatrix.getTranslateX(), ty = atlasMatrix.getTranslateY();
        atlasMatrix.setTranslateX(ty - devIBounds->y() + locationInAtlas->x());
        atlasMatrix.setTranslateY(tx - devIBounds->x() + locationInAtlas->y());
    } else {
        atlasMatrix.postTranslate(locationInAtlas->x() - devIBounds->x(),
                                  locationInAtlas->y() - devIBounds->y());
    }

    // Concatenate this path onto our uber path that matches its fill and AA types.
    SkPath path;
    shape.asPath(&path);
    SkPath* uberPath = this->getAtlasUberPath(path.getFillType(), GrAAType::kNone != aaType);
    uberPath->moveTo(locationInAtlas->x(), locationInAtlas->y());  // Implicit moveTo(0,0).
    uberPath->addPath(path, atlasMatrix);
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fRenderTargetContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
    SkRect devBounds;
    args.fViewMatrix->mapRect(&devBounds, args.fShape->bounds());
    if (auto op = make_op(args.fContext, surfaceDrawContext, OpFlags::kStencilOnly, aaType,
                          devBounds, *args.fViewMatrix, *args.fShape, GrPaint())) {
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    }
}

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (!fAtlas.drawBounds().isEmpty()) {
        this->renderAtlas(onFlushRP);
        fAtlas.reset(kAtlasInitialSize, *onFlushRP->caps());
    }
    for (SkPath& path : fAtlasUberPaths) {
        path.reset();
    }
}

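// Both stencil settings below draw only where the stencil value is nonzero (i.e., where a path in
// the atlas produced winding); the second one also resets the stencil back to zero as it covers.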
constexpr static GrUserStencilSettings kTestStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kKeep,
        GrUserStencilOp::kKeep,
        0xffff>());

constexpr static GrUserStencilSettings kTestAndResetStencil(
    GrUserStencilSettings::StaticInit<
        0x0000,
        GrUserStencilTest::kNotEqual,
        0xffff,
        GrUserStencilOp::kZero,
        GrUserStencilOp::kKeep,
        0xffff>());

void GrTessellationPathRenderer::renderAtlas(GrOnFlushResourceProvider* onFlushRP) {
    auto rtc = fAtlas.instantiate(onFlushRP);
    if (!rtc) {
        return;
    }

    SkRect atlasRect = SkRect::MakeIWH(fAtlas.drawBounds().width(), fAtlas.drawBounds().height());

    // Add ops to stencil the atlas paths.
    for (auto antialias : {false, true}) {
        for (auto fillType : {SkPathFillType::kWinding, SkPathFillType::kEvenOdd}) {
            SkPath* uberPath = this->getAtlasUberPath(fillType, antialias);
            if (uberPath->isEmpty()) {
                continue;
            }
            uberPath->setFillType(fillType);
            GrAAType aaType = (antialias) ? GrAAType::kMSAA : GrAAType::kNone;
            auto op = GrOp::Make<GrPathStencilFillOp>(onFlushRP->recordingContext(), SkMatrix::I(),
                                                      *uberPath, GrPaint(), aaType,
                                                      OpFlags::kStencilOnly |
                                                      OpFlags::kPreferWedges, atlasRect);
            rtc->addDrawOp(nullptr, std::move(op));
        }
    }

    // Finally, draw a fullscreen rect to convert our stencilled paths into alpha coverage masks.
    GrPaint paint;
    paint.setColor4f(SK_PMColor4fWHITE);
    const GrUserStencilSettings* stencil;
    if (onFlushRP->caps()->discardStencilValuesAfterRenderPass()) {
        // This is the final op in the surfaceDrawContext. Since Ganesh is planning to discard the
        // stencil values anyway, there is no need to reset the stencil values back to 0.
        stencil = &kTestStencil;
    } else {
        // Outset the cover rect in case there are T-junctions in the path bounds.
        atlasRect.outset(1, 1);
        stencil = &kTestAndResetStencil;
    }
    rtc->stencilRect(nullptr, stencil, std::move(paint), GrAA::kYes, SkMatrix::I(), atlasRect);

    if (rtc->asSurfaceProxy()->requiresManualMSAAResolve()) {
        onFlushRP->addTextureResolveTask(sk_ref_sp(rtc->asTextureProxy()),
                                         GrSurfaceProxy::ResolveFlags::kMSAA);
    }
}