/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/tessellate/GrTessellationPathRenderer.h"

#include "include/private/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/core/SkPathPriv.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrMemoryPool.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/GrVx.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/tessellate/GrAtlasRenderTask.h"
#include "src/gpu/tessellate/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrPathInnerTriangulateOp.h"
#include "src/gpu/tessellate/GrPathStencilCoverOp.h"
#include "src/gpu/tessellate/GrPathTessellateOp.h"
#include "src/gpu/tessellate/GrStrokeTessellateOp.h"
#include "src/gpu/tessellate/shaders/GrModulateAtlasCoverageFP.h"

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px-high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 128;

bool GrTessellationPathRenderer::IsSupported(const GrCaps& caps) {
    return !caps.avoidStencilBuffers() &&
           caps.drawInstancedSupport() &&
           caps.shaderCaps()->vertexIDSupport() &&
           !caps.disableTessellationPathRenderer();
}

GrTessellationPathRenderer::GrTessellationPathRenderer(GrRecordingContext* rContext) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    if (rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
        caps.internalMultisampleCount(atlasFormat) > 1) {
#if GR_TEST_UTILS
        fAtlasMaxSize = rContext->priv().options().fMaxTextureAtlasSize;
#else
        fAtlasMaxSize = 2048;
#endif
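        // Clamp the max atlas size to a power of 2, no larger than the device's preferred render
        // target size; the initial size is likewise rounded to a power of 2, capped by the max.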
        fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, caps.maxPreferredRenderTargetSize()));
        fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, fAtlasMaxSize));
    }
}

GrPathRenderer::StencilSupport GrTessellationPathRenderer::onGetStencilSupport(
        const GrStyledShape& shape) const {
    if (!shape.style().isSimpleFill()) {
        // Don't bother with stroke stencilling yet. Skia probably shouldn't support this at all
        // since you can't clip by a stroke.
        return kNoSupport_StencilSupport;
    }
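    // Convex fills are drawn directly to the render target, without an internal stencil pass, so
    // they can tolerate arbitrary user stencil settings; everything else stencils first.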
    return shape.knownToBeConvex() ? kNoRestriction_StencilSupport : kStencilOnly_StencilSupport;
}

GrPathRenderer::CanDrawPath GrTessellationPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrStyledShape& shape = *args.fShape;
    if (args.fAAType == GrAAType::kCoverage ||
        shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() ||
        shape.style().strokeRec().getStyle() == SkStrokeRec::kStrokeAndFill_Style ||
        shape.inverseFilled() ||
        !args.fProxy->canUseStencil(*args.fCaps)) {
        return CanDrawPath::kNo;
    }
    if (args.fHasUserStencilSettings) {
        // Non-convex paths and strokes use the stencil buffer internally, so they can't support
        // draws with stencil settings.
        if (!shape.style().isSimpleFill() || !shape.knownToBeConvex()) {
            return CanDrawPath::kNo;
        }
    }
    return CanDrawPath::kYes;
}

static GrOp::Owner make_non_convex_fill_op(GrRecordingContext* rContext,
                                           GrTessellationPathRenderer::PathFlags pathFlags,
                                           GrAAType aaType, const SkRect& pathDevBounds,
                                           const SkMatrix& viewMatrix, const SkPath& path,
                                           GrPaint&& paint) {
    SkASSERT(!path.isConvex());
    int numVerbs = path.countVerbs();
    if (numVerbs > 0) {
        // Check if the path is large and/or simple enough that we can triangulate the inner fan
        // on the CPU. This is our fastest approach. It allows us to stencil only the curves,
        // and then fill the inner fan directly to the final render target, thus drawing the
        // majority of pixels in a single render pass.
        float gpuFragmentWork = pathDevBounds.height() * pathDevBounds.width();
        float cpuTessellationWork = numVerbs * SkNextLog2(numVerbs);  // N log N.
        constexpr static float kCpuWeight = 512;
        constexpr static float kMinNumPixelsToTriangulate = 256 * 256;
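        // Illustrative arithmetic: a path with 256 verbs costs 256 * log2(256) = 2048 units of
        // CPU work, so it only gets triangulated on the CPU once it covers more than
        // 2048*512 + 256*256 ~= 1.1M device pixels (roughly a 1056 x 1056 bounding box).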
        if (cpuTessellationWork * kCpuWeight + kMinNumPixelsToTriangulate < gpuFragmentWork) {
            return GrOp::Make<GrPathInnerTriangulateOp>(rContext, viewMatrix, path,
                                                        std::move(paint), aaType, pathFlags,
                                                        pathDevBounds);
        }
    }
    return GrOp::Make<GrPathStencilCoverOp>(rContext, viewMatrix, path, std::move(paint), aaType,
                                            pathFlags, pathDevBounds);
}

bool GrTessellationPathRenderer::onDrawPath(const DrawPathArgs& args) {
    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;

    SkPath path;
    args.fShape->asPath(&path);

    // Handle strokes first.
    if (!args.fShape->style().isSimpleFill()) {
        SkASSERT(args.fUserStencilSettings->isUnused());
        const SkStrokeRec& stroke = args.fShape->style().strokeRec();
        SkASSERT(stroke.getStyle() != SkStrokeRec::kStrokeAndFill_Style);
        auto op = GrOp::Make<GrStrokeTessellateOp>(args.fContext, args.fAAType, *args.fViewMatrix,
                                                   path, stroke, std::move(args.fPaint));
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    if (args.fUserStencilSettings->isUnused()) {
        // See if the path is small and simple enough to atlas instead of drawing directly.
        //
        // NOTE: The atlas uses alpha8 coverage even for msaa render targets. We could theoretically
        // render the sample mask to an integer texture, but such a scheme would probably require
        // GL_EXT_post_depth_coverage, which appears to have low adoption.
        SkIRect devIBounds;
        SkIPoint16 locationInAtlas;
        bool transposedInAtlas;
        auto visitProxiesUsedByDraw = [&args](GrVisitProxyFunc visitor) {
            if (args.fPaint.hasColorFragmentProcessor()) {
                args.fPaint.getColorFragmentProcessor()->visitProxies(visitor);
            }
            if (args.fPaint.hasCoverageFragmentProcessor()) {
                args.fPaint.getCoverageFragmentProcessor()->visitProxies(visitor);
            }
        };
        if (this->tryAddPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                    args.fAAType != GrAAType::kNone, &devIBounds, &locationInAtlas,
                                    &transposedInAtlas, visitProxiesUsedByDraw)) {
            auto op = GrOp::Make<GrDrawAtlasPathOp>(
                    args.fContext, surfaceDrawContext->numSamples(),
                    sk_ref_sp(fAtlasRenderTasks.back()->atlasProxy()), devIBounds, locationInAtlas,
                    transposedInAtlas, *args.fViewMatrix, std::move(args.fPaint));
            surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
            return true;
        }
    }

    // Handle convex paths only if we couldn't fit them in the atlas. We give the atlas priority in
    // an effort to reduce DMSAA triggers.
    if (args.fShape->knownToBeConvex()) {
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(args.fPaint), args.fAAType,
                                                 args.fUserStencilSettings, pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return true;
    }

    SkASSERT(args.fUserStencilSettings->isUnused());  // See onGetStencilSupport().
    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kNone, args.fAAType, pathDevBounds,
                                      *args.fViewMatrix, path, std::move(args.fPaint));
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

void GrTessellationPathRenderer::onStencilPath(const StencilPathArgs& args) {
    SkASSERT(args.fShape->style().isSimpleFill());  // See onGetStencilSupport().

    GrSurfaceDrawContext* surfaceDrawContext = args.fSurfaceDrawContext;
    GrAAType aaType = (GrAA::kYes == args.fDoStencilMSAA) ? GrAAType::kMSAA : GrAAType::kNone;

    SkRect pathDevBounds;
    args.fViewMatrix->mapRect(&pathDevBounds, args.fShape->bounds());

    SkPath path;
    args.fShape->asPath(&path);

    if (args.fShape->knownToBeConvex()) {
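        // Mark every stencil sample the path touches with 0x0001: the test always passes and the
        // pass op replaces the stencil value with the 0x0001 reference.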
        constexpr static GrUserStencilSettings kMarkStencil(
            GrUserStencilSettings::StaticInit<
                0x0001,
                GrUserStencilTest::kAlways,
                0xffff,
                GrUserStencilOp::kReplace,
                GrUserStencilOp::kKeep,
                0xffff>());

        GrPaint stencilPaint;
        stencilPaint.setXPFactory(GrDisableColorXPFactory::Get());
        auto op = GrOp::Make<GrPathTessellateOp>(args.fContext, *args.fViewMatrix, path,
                                                 std::move(stencilPaint), aaType, &kMarkStencil,
                                                 pathDevBounds);
        surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
        return;
    }

    auto op = make_non_convex_fill_op(args.fContext, PathFlags::kStencilOnly, aaType, pathDevBounds,
                                      *args.fViewMatrix, path, GrPaint());
    surfaceDrawContext->addDrawOp(args.fClip, std::move(op));
}

GrFPResult GrTessellationPathRenderer::makeAtlasClipFP(GrRecordingContext* rContext,
                                                       const GrOp* opBeingClipped,
                                                       std::unique_ptr<GrFragmentProcessor> inputFP,
                                                       const SkIRect& drawBounds,
                                                       const SkMatrix& viewMatrix,
                                                       const SkPath& path, GrAA aa) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }
    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    auto visitProxiesUsedByDraw = [&opBeingClipped, &inputFP](GrVisitProxyFunc visitor) {
        opBeingClipped->visitProxies(visitor);
        if (inputFP) {
            inputFP->visitProxies(visitor);
        }
    };
    // tryAddPathToAtlas() ignores inverseness of the fill. See getAtlasUberPath().
    if (!this->tryAddPathToAtlas(rContext, viewMatrix, path, viewMatrix.mapRect(path.getBounds()),
                                 aa != GrAA::kNo, &devIBounds, &locationInAtlas,
                                 &transposedInAtlas, visitProxiesUsedByDraw)) {
        // The path is too big, or the atlas ran out of room.
        return GrFPFailure(std::move(inputFP));
    }
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
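        // The path was transposed when it was rendered into the atlas, so this matrix swaps x and
        // y in addition to translating: atlasCoord = (devY + tx, devX + ty).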
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageFP::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageFP::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageFP::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*rContext->priv().caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageFP>(flags, std::move(inputFP),
                                                                   std::move(atlasView),
                                                                   atlasMatrix, devIBounds));
}

void GrTessellationPathRenderer::AtlasPathKey::set(const SkMatrix& m, bool antialias,
                                                   const SkPath& path) {
    using grvx::float2;
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getSkewY();
    fAffineMatrix[3] = m.getScaleY();
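    // The subpixel translation affects rendered coverage, so it has to be part of the key. It is
    // quantized so that draws at effectively identical subpixel offsets can share an atlas entry.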
    float2 translate = {m.getTranslateX(), m.getTranslateY()};
    float2 subpixelPosition = translate - skvx::floor(translate);
    float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
                                             GrPathTessellator::kLinearizationPrecision);
    skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
    fAntialias = antialias;
    fFillRule = (uint8_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
    fPathGenID = path.getGenerationID();
}

bool GrTessellationPathRenderer::tryAddPathToAtlas(GrRecordingContext* rContext,
                                                   const SkMatrix& viewMatrix, const SkPath& path,
                                                   const SkRect& pathDevBounds, bool antialias,
                                                   SkIRect* devIBounds, SkIPoint16* locationInAtlas,
                                                   bool* transposedInAtlas,
                                                   const VisitProxiesFn& visitProxiesUsedByDraw) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    if (!fAtlasMaxSize) {
        return false;
    }

    // The atlas is not compatible with DDL. We should only be using it on direct contexts.
    SkASSERT(rContext->asDirectContext());

    const GrCaps& caps = *rContext->priv().caps();
    if (!caps.multisampleDisableSupport() && !antialias) {
        return false;
    }

    pathDevBounds.roundOut(devIBounds);
    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions fall in the same pow2 band in the atlas. Use the larger dimension as
        // height for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // The dimensions fall in different pow2 bands in the atlas. Use the smaller pow2 band for
        // the most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
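    // e.g., a 300x40 bounding box falls in different pow2 bands (512 vs. 64) and stays 40px tall,
    // while a 40x300 box gets transposed to 300x40 for the same reason.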

    // Check if the path is too large for an atlas. Since we transpose tall skinny paths, limiting
    // to kAtlasMaxPathHeight^2 pixels guarantees heightInAtlas <= kAtlasMaxPathHeight, while also
    // allowing paths that are very wide and short.
    if ((uint64_t)widthInAtlas * heightInAtlas > kAtlasMaxPathHeight * kAtlasMaxPathHeight ||
        widthInAtlas > fAtlasMaxSize) {
        return false;
    }
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, antialias, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
                                           widthInAtlas, heightInAtlas, *transposedInAtlas,
                                           locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask) {
            // Don't allow the current atlas to be replaced if the draw already uses it. Otherwise
            // the draw would use two different atlases, which breaks our guarantee that there will
            // only ever be one atlas active at a time.
            const GrSurfaceProxy* currentAtlasProxy = currentAtlasTask->atlasProxy();
            bool drawUsesCurrentAtlas = false;
            visitProxiesUsedByDraw([currentAtlasProxy, &drawUsesCurrentAtlas](GrSurfaceProxy* proxy,
                                                                              GrMipmapped) {
                if (proxy == currentAtlasProxy) {
                    drawUsesCurrentAtlas = true;
                }
            });
            if (drawUsesCurrentAtlas) {
                // The draw already uses the current atlas. Give up.
                return false;
            }
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext, rContext->priv().auditTrail(),
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, antialias, devIBounds->topLeft(),
                                             widthInAtlas, heightInAtlas, *transposedInAtlas,
                                             locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}

#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
static void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* atlasTask = atlasTasks[i].get();
        GrAtlasRenderTask* previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires, it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif

void GrTessellationPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                          SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify that the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}