/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/private/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/GrVx.h"
#include "src/gpu/effects/GrDisableColorXP.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/tessellate/GrAtlasRenderTask.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
#include "src/gpu/tessellate/GrPathStencilCoverOp.h"
#include "src/gpu/tessellate/GrPathTessellateOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
#include "src/gpu/tessellate/shaders/GrModulateAtlasCoverageFP.h"

Chris Daltond72cb4c2020-07-16 17:50:17 -060028constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
Chris Dalton83420eb2021-06-23 18:47:09 -060029constexpr static int kAtlasInitialSize = 512;
Chris Daltond72cb4c2020-07-16 17:50:17 -060030
Chris Daltond2dc8dd2020-05-19 16:32:02 -060031// The atlas is only used for small-area paths, which means at least one dimension of every path is
32// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
33// height, which lends very well to efficient pow2 atlas packing.
34constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;
35
36// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
Chris Dalton83420eb2021-06-23 18:47:09 -060037constexpr static int kAtlasMaxPathHeight = 128;
Chris Daltond2dc8dd2020-05-19 16:32:02 -060038
Chris Dalton1413d112020-07-09 11:26:31 -060039bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
Chris Dalton8f282f52021-01-06 11:47:58 -070040 return !caps.avoidStencilBuffers() &&
41 caps.drawInstancedSupport() &&
Chris Dalton3febc612021-07-14 13:47:07 -060042 caps.shaderCaps()->infinitySupport() &&
Chris Daltoneae5c162020-12-29 10:18:21 -070043 !caps.disableTessellationPathRenderer();
Chris Dalton1413d112020-07-09 11:26:31 -060044}
45
Chris Dalton83420eb2021-06-23 18:47:09 -060046GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext) {
Chris Dalton31634282020-09-17 12:16:54 -060047 const GrCaps& caps = *rContext->priv().caps();
Chris Dalton9213e612020-10-09 17:22:43 -060048 auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
Chris Dalton569c01b2021-05-25 10:11:46 -060049 if (rContext->asDirectContext() && // The atlas doesn't support DDL yet.
50 caps.internalMultisampleCount(atlasFormat) > 1) {
Chris Dalton83420eb2021-06-23 18:47:09 -060051#if GR_TEST_UTILS
52 fAtlasMaxSize = rContext->priv().options().fMaxTextureAtlasSize;
53#else
54 fAtlasMaxSize = 2048;
55#endif
56 fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, caps.maxPreferredRenderTargetSize()));
57 fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, fAtlasMaxSize));
Chris Dalton9213e612020-10-09 17:22:43 -060058 }
Chris Dalton4e998532020-02-10 11:06:42 -070059}
60
Chris Dalton7ae272f2021-06-10 11:45:14 -060061GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport(
62 const GrStyledShape& shape) const {
Chris Daltonbaae2dd2021-06-25 14:52:49 -060063 if (!shape.style().isSimpleFill() || shape.inverseFilled()) {
64 // Don't bother with stroke stencilling or inverse fills yet. The Skia API doesn't support
65 // clipping by a stroke, and the stencilling code already knows how to invert a fill.
Chris Dalton7ae272f2021-06-10 11:45:14 -060066 return kNoSupport_StencilSupport;
67 }
68 return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport;
69}
70
Chris Dalton0a22b1e2020-03-26 11:52:15 -060071GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
Chris Daltonb832ce62020-01-06 19:49:37 -070072 const CanDrawPathArgs& args) const {
Chris Dalton1c62a7b2020-06-29 22:01:14 -060073 const GrStyledShape& shape = *args.fShape;
Chris Dalton57ab06c2021-04-22 12:57:28 -060074 if (args.fAAType == GrAAType::kCoverage ||
75 shape.style().hasPathEffect() ||
Chris Dalton06b52ad2020-12-15 10:01:35 -070076 args.fViewMatrix->hasPerspective() ||
77 shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
Chris Dalton537293bf2021-05-03 15:54:24 -060078 !args.fProxy->canUseStencil(*args.fCaps)) {
Chris Daltonb832ce62020-01-06 19:49:37 -070079 return CanDrawPath::kNo;
80 }
Chris Daltona05ccc32021-06-29 19:42:13 -060081 if (!shape.style().isSimpleFill()) {
Chris Daltonbb995e62021-07-01 10:58:55 -060082 if (shape.inverseFilled()) {
Chris Daltona05ccc32021-06-29 19:42:13 -060083 return CanDrawPath::kNo;
84 }
85 }
Chris Dalton7ae272f2021-06-10 11:45:14 -060086 if (args.fHasUserStencilSettings) {
87 // Non-convex paths and strokes use the stencil buffer internally, so they can't support
88 // draws with stencil settings.
Chris Daltonbaae2dd2021-06-25 14:52:49 -060089 if (!shape.style().isSimpleFill() || !shape.knownToBeConvex() || shape.inverseFilled()) {
Chris Dalton7ae272f2021-06-10 11:45:14 -060090 return CanDrawPath::kNo;
91 }
92 }
Chris Daltonb832ce62020-01-06 19:49:37 -070093 return CanDrawPath::kYes;
94}
95
Chris Dalton7ae272f2021-06-10 11:45:14 -060096static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext,
97 GrTessellationPathRenderer::PathFlags pathFlags,
Chris Daltonbaae2dd2021-06-25 14:52:49 -060098 GrAAType aaType, const SkRect& drawBounds,
Chris Dalton7ae272f2021-06-10 11:45:14 -060099 const SkMatrix& viewMatrix, const SkPath& path,
100 GrPaint&& paint) {
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600101 SkASSERT(!path.isConvex() || path.isInverseFillType());
Chris Dalton7ae272f2021-06-10 11:45:14 -0600102 int numVerbs = path.countVerbs();
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600103 if (numVerbs > 0 && !path.isInverseFillType()) {
Chris Dalton7ae272f2021-06-10 11:45:14 -0600104 // Check if the path is large and/or simple enough that we can triangulate the inner fan
105 // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
106 // and then fill the inner fan directly to the final render target, thus drawing the
107 // majority of pixels in a single render pass.
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600108 float gpuFragmentWork = drawBounds.height() * drawBounds.width();
Chris Dalton7ae272f2021-06-10 11:45:14 -0600109 float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs); // N log N.
110 constexpr static float kCpuWeight = 512;
111 constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
112 if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
113 return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
114 std::move(paint), aaType, pathFlags,
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600115 drawBounds);
Chris Dalton70a0d2c2021-01-26 12:01:21 -0700116 }
Chris Daltonc2a17462020-12-09 16:46:22 -0700117 }
Chris Dalton7ae272f2021-06-10 11:45:14 -0600118 return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType,
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600119 pathFlags, drawBounds);
Chris Daltonc2a17462020-12-09 16:46:22 -0700120}
121
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600122bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
John Stiles0fbc6a32021-06-04 14:40:57 -0400123 GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
Chris Daltonb832ce62020-01-06 19:49:37 -0700124
Chris Dalton7ae272f2021-06-10 11:45:14 -0600125 SkPath path;
126 args.fShape->asPath(&path);
127
128 // Handle strokes first.
129 if (!args.fShape->style().isSimpleFill()) {
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600130 SkASSERT(!path.isInverseFillType()); // See onGetStencilSupport().
Chris Dalton7ae272f2021-06-10 11:45:14 -0600131 SkASSERT(args.fUserStencilSettings->isUnused());
132 const SkStrokeRec& stroke = args.fShape->style().strokeRec();
133 SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
134 auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
135 path, stroke, std::move(args.fPaint));
136 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
137 return true;
138 }
139
Chris Dalton2346aa02021-07-14 22:55:35 -0600140 const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600141 if (pathDevBounds.isEmpty()) {
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600142 if (path.isInverseFillType()) {
143 args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
144 *args.fViewMatrix);
145 }
146 return true;
147 }
Chris Daltonb96995d2020-06-04 16:44:29 -0600148
Chris Dalton83420eb2021-06-23 18:47:09 -0600149 if (args.fUserStencilSettings->isUnused()) {
150 // See if the path is small and simple enough to atlas instead of drawing directly.
151 //
152 // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
153 // render the sample mask to an integer texture, but such a scheme would probably require
154 // GL_EXT_post_depth_coverage, which appears to have low adoption.
155 SkIRect devIBounds;
156 SkIPoint16 locationInAtlas;
157 bool transposedInAtlas;
158 auto visitProxiesUsedByDraw = [&args](GrVisitProxyFunc visitor) {
159 if (args.fPaint.hasColorFragmentProcessor()) {
160 args.fPaint.getColorFragmentProcessor()->visitProxies(visitor);
161 }
162 if (args.fPaint.hasCoverageFragmentProcessor()) {
163 args.fPaint.getCoverageFragmentProcessor()->visitProxies(visitor);
164 }
165 };
166 if (this->tryAddPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
167 args.fAAType != GrAAType::kNone, &devIBounds, &locationInAtlas,
168 &transposedInAtlas, visitProxiesUsedByDraw)) {
Chris Daltonb1fd64e2021-07-08 15:38:51 -0600169 const GrCaps& caps = *args.fSurfaceDrawContext->caps();
Chris Daltonee40d5a2021-07-07 16:34:36 -0600170 const SkIRect& fillBounds = path.isInverseFillType()
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600171 ? (args.fClip
Chris Daltonee40d5a2021-07-07 16:34:36 -0600172 ? args.fClip->getConservativeBounds()
173 : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
174 : devIBounds;
Chris Daltoncc29a392021-07-12 15:16:29 -0600175 auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
176 args.fSurfaceDrawContext->arenaAlloc(),
177 fillBounds, *args.fViewMatrix,
178 std::move(args.fPaint), locationInAtlas,
179 devIBounds, transposedInAtlas,
180 fAtlasRenderTasks.back()->readView(caps),
181 path.isInverseFillType());
Chris Dalton83420eb2021-06-23 18:47:09 -0600182 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
183 return true;
184 }
Chris Dalton4e998532020-02-10 11:06:42 -0700185 }
Chris Daltonb832ce62020-01-06 19:49:37 -0700186
Chris Dalton7ae272f2021-06-10 11:45:14 -0600187 // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
188 // an effort to reduce DMSAA triggers.
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600189 if (args.fShape->knownToBeConvex() && !path.isInverseFillType()) {
Chris Dalton7ae272f2021-06-10 11:45:14 -0600190 auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
191 std::move(args.fPaint), args.fAAType,
192 args.fUserStencilSettings, pathDevBounds);
Chris Daltonb0643342020-12-15 01:04:12 -0700193 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton7ae272f2021-06-10 11:45:14 -0600194 return true;
Chris Daltonb96995d2020-06-04 16:44:29 -0600195 }
Chris Dalton7ae272f2021-06-10 11:45:14 -0600196
197 SkASSERT(args.fUserStencilSettings->isUnused()); // See onGetStencilSupport().
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600198 const SkRect& drawBounds = path.isInverseFillType()
199 ? args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsRect()
200 : pathDevBounds;
201 auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, drawBounds,
Chris Dalton7ae272f2021-06-10 11:45:14 -0600202 *args.fViewMatrix, path, std::move(args.fPaint));
203 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
Chris Dalton4e998532020-02-10 11:06:42 -0700204 return true;
205}
206
Chris Dalton7ae272f2021-06-10 11:45:14 -0600207void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
208 SkASSERT(args.fShape->style().isSimpleFill()); // See onGetStencilSupport().
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600209 SkASSERT(!args.fShape->inverseFilled()); // See onGetStencilSupport().
Chris Dalton7ae272f2021-06-10 11:45:14 -0600210
211 GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
212 GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;
213
214 SkRect pathDevBounds;
215 args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());
216
217 SkPath path;
218 args.fShape->asPath(&path);
219
220 if (args.fShape->knownToBeConvex()) {
221 constexpr static GrUserStencilSettings kMarkStencil(
222 GrUserStencilSettings::StaticInit<
223 0x0001,
224 GrUserStencilTest::kAlways,
225 0xffff,
226 GrUserStencilOp::kReplace,
227 GrUserStencilOp::kKeep,
228 0xffff>());
229
230 GrPaint stencilPaint;
231 stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());
232 auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
233 std::move(stencilPaint), aaType, &kMarkStencil,
234 pathDevBounds);
235 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
236 return;
Chris Daltonb0643342020-12-15 01:04:12 -0700237 }
238
Chris Dalton7ae272f2021-06-10 11:45:14 -0600239 auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
240 *args.fViewMatrix, path, GrPaint());
241 surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
242}
243
Chris Dalton83420eb2021-06-23 18:47:09 -0600244GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(GrRecordingContext* rContext,
245 const GrOp* opBeingClipped,
Chris Daltonabed2672021-06-17 16:54:28 -0600246 std::unique_ptr<GrFragmentProcessor> inputFP,
Chris Dalton83420eb2021-06-23 18:47:09 -0600247 const SkIRect& drawBounds,
248 const SkMatrix& viewMatrix,
249 const SkPath& path, GrAA aa) {
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600250 if (viewMatrix.hasPerspective()) {
Chris Daltonabed2672021-06-17 16:54:28 -0600251 return GrFPFailure(std::move(inputFP));
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600252 }
Chris Dalton2346aa02021-07-14 22:55:35 -0600253 const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600254 if (pathDevBounds.isEmpty()) {
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600255 return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
256 : GrFPFailure(std::move(inputFP));
257 }
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600258 SkIRect devIBounds;
259 SkIPoint16 locationInAtlas;
260 bool transposedInAtlas;
Chris Dalton83420eb2021-06-23 18:47:09 -0600261 auto visitProxiesUsedByDraw = [&opBeingClipped, &inputFP](GrVisitProxyFunc visitor) {
262 opBeingClipped->visitProxies(visitor);
263 if (inputFP) {
264 inputFP->visitProxies(visitor);
265 }
266 };
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600267 // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
Chris Daltonbaae2dd2021-06-25 14:52:49 -0600268 if (!this->tryAddPathToAtlas(rContext, viewMatrix, path, pathDevBounds, aa != GrAA::kNo,
269 &devIBounds, &locationInAtlas, &transposedInAtlas,
270 visitProxiesUsedByDraw)) {
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600271 // The path is too big, or the atlas ran out of room.
Chris Daltonabed2672021-06-17 16:54:28 -0600272 return GrFPFailure(std::move(inputFP));
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600273 }
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600274 SkMatrix atlasMatrix;
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600275 auto [atlasX, atlasY] = locationInAtlas;
276 if (!transposedInAtlas) {
Chris Daltonabed2672021-06-17 16:54:28 -0600277 atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600278 } else {
279 atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
280 1, 0, atlasY - devIBounds.left(),
281 0, 0, 1);
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600282 }
Chris Daltonabed2672021-06-17 16:54:28 -0600283 auto flags = GrModulateAtlasCoverageFP::Flags::kNone;
Chris Daltonfd3ec902021-06-17 20:44:13 +0000284 if (path.isInverseFillType()) {
Chris Daltonabed2672021-06-17 16:54:28 -0600285 flags |= GrModulateAtlasCoverageFP::Flags::kInvertCoverage;
Chris Daltonfd3ec902021-06-17 20:44:13 +0000286 }
Chris Daltonabed2672021-06-17 16:54:28 -0600287 if (!devIBounds.contains(drawBounds)) {
288 flags |= GrModulateAtlasCoverageFP::Flags::kCheckBounds;
289 // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
290 // opposed to us having to check the path bounds. Feel free to remove this assert if that
291 // ever changes.
292 SkASSERT(path.isInverseFillType());
293 }
Chris Dalton83420eb2021-06-23 18:47:09 -0600294 GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*rContext->priv().caps());
Chris Daltonabed2672021-06-17 16:54:28 -0600295 return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageFP>(flags, std::move(inputFP),
Chris Dalton83420eb2021-06-23 18:47:09 -0600296 std::move(atlasView),
Chris Daltonabed2672021-06-17 16:54:28 -0600297 atlasMatrix, devIBounds));
Chris Dalton43a8b0c2021-06-14 17:10:07 -0600298}
299
Chris Dalton50c3c242021-06-14 16:32:35 -0600300void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias,
301 const SkPath& path) {
302 using grvx::float2;
303 fAffineMatrix[0] = m.getScaleX();
304 fAffineMatrix[1] = m.getSkewX();
305 fAffineMatrix[2] = m.getSkewY();
306 fAffineMatrix[3] = m.getScaleY();
307 float2 translate = {m.getTranslateX(), m.getTranslateY()};
308 float2 subpixelPosition = translate - skvx::floor(translate);
Robert Phillips62214f72021-06-15 10:12:51 -0400309 float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
Chris Daltone1f72372021-06-29 16:45:49 -0600310 GrTessellationShader::kLinearizationPrecision);
Chris Dalton50c3c242021-06-14 16:32:35 -0600311 skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
312 fAntialias = antialias;
313 fFillRule = (uint8_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
314 fPathGenID = path.getGenerationID();
315}
316
Chris Dalton83420eb2021-06-23 18:47:09 -0600317bool GrTessellationPathRenderer::tryAddPathToAtlas(GrRecordingContext* rContext,
318 const SkMatrix& viewMatrix, const SkPath& path,
319 const SkRect& pathDevBounds, bool antialias,
320 SkIRect* devIBounds, SkIPoint16* locationInAtlas,
321 bool* transposedInAtlas,
322 const VisitProxiesFn& visitProxiesUsedByDraw) {
Chris Dalton50c3c242021-06-14 16:32:35 -0600323 SkASSERT(!viewMatrix.hasPerspective()); // See onCanDrawPath().
324
Chris Dalton2346aa02021-07-14 22:55:35 -0600325 // Write as the NOT of positive logic, so we will return false if any values are NaN.
326 if (!(pathDevBounds.width() > 0 && pathDevBounds.width() <= fAtlasMaxSize) ||
327 !(pathDevBounds.height() > 0 && pathDevBounds.height() <= fAtlasMaxSize)) {
Chris Daltond72cb4c2020-07-16 17:50:17 -0600328 return false;
329 }
330
Chris Dalton83420eb2021-06-23 18:47:09 -0600331 // The atlas is not compatible with DDL. We should only be using it on direct contexts.
332 SkASSERT(rContext->asDirectContext());
333
334 const GrCaps& caps = *rContext->priv().caps();
Chris Dalton50c3c242021-06-14 16:32:35 -0600335 if (!caps.multisampleDisableSupport() && !antialias) {
Chris Dalton4e998532020-02-10 11:06:42 -0700336 return false;
337 }
338
Chris Dalton7ae272f2021-06-10 11:45:14 -0600339 pathDevBounds.roundOut(devIBounds);
Chris Dalton8c3036c2021-06-23 14:34:56 -0600340 int widthInAtlas = devIBounds->width();
341 int heightInAtlas = devIBounds->height();
Chris Dalton2346aa02021-07-14 22:55:35 -0600342 if (widthInAtlas <= 0 || heightInAtlas <= 0) {
343 return false;
344 }
345
Chris Dalton8c3036c2021-06-23 14:34:56 -0600346 if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
347 // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
348 // for more efficient packing.
349 *transposedInAtlas = widthInAtlas > heightInAtlas;
350 } else {
351 // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
352 // most efficient packing.
353 *transposedInAtlas = heightInAtlas > widthInAtlas;
354 }
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600355 if (*transposedInAtlas) {
Chris Dalton8c3036c2021-06-23 14:34:56 -0600356 std::swap(heightInAtlas, widthInAtlas);
Chris Daltond2dc8dd2020-05-19 16:32:02 -0600357 }
358
Chris Dalton8c3036c2021-06-23 14:34:56 -0600359 // Check if the path is too large for an atlas. Since we transpose tall skinny paths, limiting
360 // to kAtlasMaxPathHeight^2 pixels guarantees heightInAtlas <= kAtlasMaxPathHeight, while also
361 // allowing paths that are very wide and short.
362 if ((uint64_t)widthInAtlas * heightInAtlas > kAtlasMaxPathHeight * kAtlasMaxPathHeight ||
363 widthInAtlas > fAtlasMaxSize) {
Chris Dalton4e998532020-02-10 11:06:42 -0700364 return false;
365 }
Chris Dalton8c3036c2021-06-23 14:34:56 -0600366 SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);
Chris Dalton4e998532020-02-10 11:06:42 -0700367
Chris Dalton50c3c242021-06-14 16:32:35 -0600368 // Check if this path is already in the atlas. This is mainly for clip paths.
369 AtlasPathKey atlasPathKey;
370 if (!path.isVolatile()) {
371 atlasPathKey.set(viewMatrix, antialias, path);
372 if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
373 *locationInAtlas = *existingLocation;
374 return true;
375 }
376 }
377
Chris Dalton83420eb2021-06-23 18:47:09 -0600378 if (fAtlasRenderTasks.empty() ||
379 !fAtlasRenderTasks.back()->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
Chris Dalton8c3036c2021-06-23 14:34:56 -0600380 widthInAtlas, heightInAtlas, *transposedInAtlas,
Chris Dalton83420eb2021-06-23 18:47:09 -0600381 locationInAtlas)) {
382 // We either don't have an atlas yet or the current one is full. Try to replace it.
383 GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
384 ? fAtlasRenderTasks.back().get() : nullptr;
385 if (currentAtlasTask) {
386 // Don't allow the current atlas to be replaced if the draw already uses it. Otherwise
387 // the draw would use two different atlases, which breaks our guarantee that there will
388 // only ever be one atlas active at a time.
389 const GrSurfaceProxy* currentAtlasProxy = currentAtlasTask->atlasProxy();
390 bool drawUsesCurrentAtlas = false;
391 visitProxiesUsedByDraw([currentAtlasProxy, &drawUsesCurrentAtlas](GrSurfaceProxy* proxy,
392 GrMipmapped) {
393 if (proxy == currentAtlasProxy) {
394 drawUsesCurrentAtlas = true;
395 }
396 });
397 if (drawUsesCurrentAtlas) {
398 // The draw already uses the current atlas. Give up.
399 return false;
400 }
401 }
402 // Replace the atlas with a new one.
403 auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
404 kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
405 SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
406 *rContext->priv().caps(), kAtlasAlgorithm);
Robert Phillipsa92913e2021-07-12 16:31:52 -0400407 auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
Chris Dalton83420eb2021-06-23 18:47:09 -0600408 sk_make_sp<GrArenas>(),
409 std::move(dynamicAtlas));
410 rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
411 SkAssertResult(newAtlasTask->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
Chris Dalton8c3036c2021-06-23 14:34:56 -0600412 widthInAtlas, heightInAtlas, *transposedInAtlas,
Chris Dalton83420eb2021-06-23 18:47:09 -0600413 locationInAtlas));
414 fAtlasRenderTasks.push_back(std::move(newAtlasTask));
415 fAtlasPathCache.reset();
Chris Dalton4e998532020-02-10 11:06:42 -0700416 }
417
Chris Dalton50c3c242021-06-14 16:32:35 -0600418 // Remember this path's location in the atlas, in case it gets drawn again.
419 if (!path.isVolatile()) {
420 fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
421 }
Chris Daltonb832ce62020-01-06 19:49:37 -0700422 return true;
423}
424
#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
// (Made 'static': this debug helper is only referenced from this file's SkDEBUGCODE.)
static void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* atlasTask = atlasTasks[i].get();
        GrAtlasRenderTask* previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif
443
Chris Dalton0a22b1e2020-03-26 11:52:15 -0600444void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
Adlai Holler9902cff2020-11-11 08:51:25 -0500445 SkSpan<const uint32_t> /* taskIDs */) {
Chris Dalton83420eb2021-06-23 18:47:09 -0600446 if (fAtlasRenderTasks.empty()) {
447 SkASSERT(fAtlasPathCache.count() == 0);
Chris Dalton4e998532020-02-10 11:06:42 -0700448 return;
449 }
450
Chris Dalton83420eb2021-06-23 18:47:09 -0600451 // Verify the atlases can all share the same texture.
452 SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)
Chris Dalton569c01b2021-05-25 10:11:46 -0600453
Chris Dalton83420eb2021-06-23 18:47:09 -0600454 // Instantiate the first atlas.
455 fAtlasRenderTasks[0]->instantiate(onFlushRP);
456
457 // Instantiate the remaining atlases.
458 GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
459 SkASSERT(firstAtlasTexture);
460 for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
461 GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
462 if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
463 atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
464 } else {
465 // The atlases are expected to all be full size except possibly the final one.
466 SkASSERT(i == fAtlasRenderTasks.count() - 1);
467 SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
468 firstAtlasTexture->dimensions().area());
469 // TODO: Recycle the larger atlas texture anyway?
470 atlasTask->instantiate(onFlushRP);
Chris Dalton4e998532020-02-10 11:06:42 -0700471 }
472 }
473
Chris Dalton83420eb2021-06-23 18:47:09 -0600474 // Reset all atlas data.
475 fAtlasRenderTasks.reset();
476 fAtlasPathCache.reset();
Chris Dalton4e998532020-02-10 11:06:42 -0700477}