blob: b20cdd43b2436a42b9b0076ad237270ac931cc6c [file] [log] [blame]
Chris Daltonc3176002021-07-23 15:33:09 -06001/*
2 * Copyright 2019 Google LLC.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
Robert Phillips43e70f12021-08-19 11:12:48 -04008#include "src/gpu/ops/AtlasPathRenderer.h"
Chris Daltonc3176002021-07-23 15:33:09 -06009
10#include "include/private/SkVx.h"
11#include "src/core/SkIPoint16.h"
12#include "src/gpu/GrClip.h"
13#include "src/gpu/GrDirectContextPriv.h"
Chris Daltonc3176002021-07-23 15:33:09 -060014#include "src/gpu/GrVx.h"
15#include "src/gpu/effects/GrModulateAtlasCoverageEffect.h"
16#include "src/gpu/geometry/GrStyledShape.h"
Robert Phillipsfdafc0c2021-08-25 16:39:14 -040017#include "src/gpu/ops/AtlasRenderTask.h"
Robert Phillips769b4882021-09-07 16:48:46 -040018#include "src/gpu/ops/DrawAtlasPathOp.h"
Robert Phillipse453fa02021-08-19 14:57:05 -040019#include "src/gpu/ops/TessellationPathRenderer.h"
Chris Daltonc3176002021-07-23 15:33:09 -060020#include "src/gpu/tessellate/shaders/GrTessellationShader.h"
Robert Phillips4dca8312021-07-28 15:13:20 -040021#include "src/gpu/v1/SurfaceDrawContext_v1.h"
Chris Daltonc3176002021-07-23 15:33:09 -060022
23using grvx::float2;
24using grvx::int2;
25
Robert Phillips43e70f12021-08-19 11:12:48 -040026namespace {
27
28// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
29// boundaries.
30std::tuple<float2,float2> round_out(const SkRect& r) {
31 return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))};
32}
33
34// Returns whether the given proxyOwner uses the atlasProxy.
35template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
36 bool refsAtlas = false;
37 auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) {
38 if (proxy == atlasProxy) {
39 refsAtlas = true;
40 }
41 };
42 if (proxyOwner) {
43 proxyOwner->visitProxies(checkForAtlasRef);
44 }
45 return refsAtlas;
46}
47
48bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
49 float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft);
50 float2 pathBotRight = float2::Load(&pathDevBounds.fRight);
51 // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false
52 // in the case of NaN.
53 if (!skvx::all(pathTopLeft < pathBotRight)) {
54 return false;
55 }
56 float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft));
57 float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight));
58 static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
59 return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft);
60}
61
#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
void validate_atlas_dependencies(const SkTArray<sk_sp<skgpu::v1::AtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        auto currTask = atlasTasks[i].get();
        auto prevTask = atlasTasks[i - 1].get();
        // Every dependent of the previous atlas must also be a dependency of the current one. If
        // this fires it might mean prevTask gained a new dependent after currTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* userOfPrev : prevTask->dependents()) {
            SkASSERT(currTask->dependsOn(userOfPrev));
        }
    }
}
#endif
80
81} // anonymous namespace
82
83namespace skgpu::v1 {
84
// The atlas renders a single-channel alpha-8 coverage mask per path.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
// Atlases start at this size and grow (pow2) up to fAtlasMaxSize as they fill.
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this landmine
// until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;
104
// Returns whether the atlas path renderer can be used with the given context. Requires a direct
// context (no DDL support yet), internal MSAA support for the alpha-8 atlas format, and the
// tessellation path renderer.
bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
#ifdef SK_BUILD_FOR_IOS
    // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can
    // investigate.
    if (rContext->backend() == GrBackendApi::kOpenGL) {
        return false;
    }
#endif
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    return rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
           caps.internalMultisampleCount(atlasFormat) > 1 &&
           // GrAtlasRenderTask currently requires tessellation. In the future it could use the
           // default path renderer when tessellation isn't available.
           TessellationPathRenderer::IsSupported(caps);
}
121
Robert Phillips43e70f12021-08-19 11:12:48 -0400122sk_sp<AtlasPathRenderer> AtlasPathRenderer::Make(GrRecordingContext* rContext) {
Chris Daltonc3176002021-07-23 15:33:09 -0600123 return IsSupported(rContext)
Robert Phillips43e70f12021-08-19 11:12:48 -0400124 ? sk_sp<AtlasPathRenderer>(new AtlasPathRenderer(rContext->asDirectContext()))
Chris Daltonc3176002021-07-23 15:33:09 -0600125 : nullptr;
126}
127
AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    // Tests may override the atlas size via context options.
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    // Clamp to the hardware's preferred render target size, rounded down to a power of two so the
    // pow2 rectanizer bands pack cleanly.
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    // The per-path width limit can never exceed the atlas itself.
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}
140
Robert Phillips43e70f12021-08-19 11:12:48 -0400141bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
142 GrAAType fallbackAAType) const {
Chris Dalton66deeb22021-07-23 13:57:03 -0600143 SkASSERT(fallbackAAType != GrAAType::kNone); // The atlas doesn't support non-AA.
144 float atlasMaxPathHeight_pow2 = (fallbackAAType == GrAAType::kMSAA)
145 ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
146 : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
Chris Daltonc3176002021-07-23 15:33:09 -0600147 auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
148 float2 size = botRightCeil - topLeftFloor;
149 return // Ensure the path's largest dimension fits in the atlas.
Chris Dalton72fd33a2021-07-28 14:19:13 -0600150 skvx::all(size <= fAtlasMaxPathWidth) &&
Chris Dalton66deeb22021-07-23 13:57:03 -0600151 // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
152 // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
Chris Daltonc3176002021-07-23 15:33:09 -0600153 // very wide and short.
Chris Dalton66deeb22021-07-23 13:57:03 -0600154 size[0] * size[1] <= atlasMaxPathHeight_pow2;
Chris Daltonc3176002021-07-23 15:33:09 -0600155}
156
Robert Phillips43e70f12021-08-19 11:12:48 -0400157void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
Chris Daltonc3176002021-07-23 15:33:09 -0600158 using grvx::float2;
159 fPathGenID = path.getGenerationID();
160 fAffineMatrix[0] = m.getScaleX();
161 fAffineMatrix[1] = m.getSkewX();
Chris Dalton7311f9a2021-10-05 13:31:16 -0600162 fAffineMatrix[2] = m.getTranslateX();
163 fAffineMatrix[3] = m.getSkewY();
164 fAffineMatrix[4] = m.getScaleY();
165 fAffineMatrix[5] = m.getTranslateY();
166 fFillRule = (uint32_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
Chris Daltonc3176002021-07-23 15:33:09 -0600167}
168
// Adds the path to the current atlas (reusing a cached location for non-volatile paths when
// possible), starting a new atlas if the current one is full. Returns false only if a new atlas
// was needed but drawRefsAtlasCallback reported the draw already references the current atlas.
// Outputs: devIBounds (path's rounded-out device bounds), locationInAtlas (top-left in the
// atlas), transposedInAtlas (whether width/height were swapped for packing).
bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                       const SkMatrix& viewMatrix,
                                       const SkPath& path,
                                       const SkRect& pathDevBounds,
                                       SkIRect* devIBounds,
                                       SkIPoint16* locationInAtlas,
                                       bool* transposedInAtlas,
                                       const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
        // for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths. (Volatile paths
    // are never cached; their genID can't be trusted across mutations.)
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        auto currentAtlasTask = (!fAtlasRenderTasks.empty()) ? fAtlasRenderTasks.back().get()
                                                             : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<AtlasRenderTask>(rContext,
                                                        sk_make_sp<GrArenas>(),
                                                        std::move(dynamicAtlas));
        // Register the new atlas task so it renders after the one it replaces.
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        // A fresh atlas must have room for a path that passed pathFitsInAtlas().
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        // Cached locations refer to the old atlas; they are all stale now.
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}
253
// Accepts only simple-fill, antialiased, non-perspective paths whose device bounds fit in the
// atlas (see pathFitsInAtlas()).
PathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
#ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
#else
                       args.fAAType != GrAAType::kNone &&
#endif
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}
279
// Renders the path by adding its coverage mask to the atlas and issuing a DrawAtlasPathOp that
// samples that mask. onCanDrawPath() has already vetted the path (simple fill, AA, no
// perspective, fits in atlas).
bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            // An invisible inverse fill covers everything inside the clip.
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // This cannot fail: with no DrawRefsAtlasCallback, addPathToAtlas() never gives up.
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    // Inverse fills must cover the whole clip (or render target), not just the path's bounds.
    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<DrawAtlasPathOp>(args.fContext,
                                          args.fSurfaceDrawContext->arenaAlloc(),
                                          fillBounds, *args.fViewMatrix,
                                          std::move(args.fPaint), locationInAtlas,
                                          devIBounds, transposedInAtlas,
                                          fAtlasRenderTasks.back()->readView(caps),
                                          args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
319
// Attempts to build a clip fragment processor that modulates coverage by the path's atlas mask.
// On failure (perspective, path too big, atlas full, or a non-inverse path outside drawBounds),
// returns GrFPFailure with inputFP passed back untouched so the caller can fall back.
GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
                                                  const GrOp* opBeingClipped,
                                                  std::unique_ptr<GrFragmentProcessor> inputFP,
                                                  const SkIRect& drawBounds,
                                                  const SkMatrix& viewMatrix,
                                                  const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed. An inverse fill clips
        // nothing within drawBounds (success, pass-through); a regular fill clips everything
        // (failure, caller handles it).
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    // Map device space to the path's location in the atlas, swapping x/y if the path was
    // transposed for packing.
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}
385
// Instantiates every pending atlas render task before the flush, backing them all with the first
// atlas's texture (they never run concurrently -- see validate_atlas_dependencies()), then
// resets all per-flush atlas state.
void AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                 SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        // No atlases were created this flush; the path cache must be empty too.
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases, sharing the first one's texture when dimensions match.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        auto atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}
Robert Phillips43e70f12021-08-19 11:12:48 -0400420
421} // namespace skgpu::v1