blob: 26bbe990f8faf6877def12b5d1c11793b240799d [file] [log] [blame]
/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "src/gpu/ops/GrAtlasPathRenderer.h"
9
10#include "include/private/SkVx.h"
11#include "src/core/SkIPoint16.h"
12#include "src/gpu/GrClip.h"
13#include "src/gpu/GrDirectContextPriv.h"
Chris Daltonc3176002021-07-23 15:33:09 -060014#include "src/gpu/GrVx.h"
15#include "src/gpu/effects/GrModulateAtlasCoverageEffect.h"
16#include "src/gpu/geometry/GrStyledShape.h"
17#include "src/gpu/ops/GrDrawAtlasPathOp.h"
18#include "src/gpu/tessellate/GrAtlasRenderTask.h"
19#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
20#include "src/gpu/tessellate/shaders/GrTessellationShader.h"
Robert Phillips4dca8312021-07-28 15:13:20 -040021#include "src/gpu/v1/SurfaceDrawContext_v1.h"
Chris Daltonc3176002021-07-23 15:33:09 -060022
23using grvx::float2;
24using grvx::int2;
25
// Atlas masks are single-channel alpha-8 coverage.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
// Starting atlas dimension; the constructor clamps it against fAtlasMaxSize.
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;
Chris Daltonc3176002021-07-23 15:33:09 -060040
41bool GrAtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
42 const GrCaps& caps = *rContext->priv().caps();
43 auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
44 return rContext->asDirectContext() && // The atlas doesn't support DDL yet.
45 caps.internalMultisampleCount(atlasFormat) > 1 &&
46 // GrAtlasRenderTask currently requires tessellation. In the future it could use the
47 // default path renderer when tessellation isn't available.
48 GrTessellationPathRenderer::IsSupported(caps);
49}
50
51sk_sp<GrAtlasPathRenderer> GrAtlasPathRenderer::Make(GrRecordingContext* rContext) {
52 return IsSupported(rContext)
53 ? sk_sp<GrAtlasPathRenderer>(new GrAtlasPathRenderer(rContext->asDirectContext()))
54 : nullptr;
55}
56
GrAtlasPathRenderer::GrAtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    // Tests may override the atlas size limit via context options.
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    // Clamp the max size to the largest pow2 that also fits the caps' preferred render target
    // size. (Must happen before computing fAtlasInitialSize, which is clamped against it.)
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    // Initial size is a pow2 no larger than the (already clamped) max size.
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}
68
69// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
70// boundaries.
71static std::tuple<float2,float2> round_out(const SkRect& r) {
72 return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))};
73}
74
Chris Dalton66deeb22021-07-23 13:57:03 -060075bool GrAtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
76 GrAAType fallbackAAType) const {
77 SkASSERT(fallbackAAType != GrAAType::kNone); // The atlas doesn't support non-AA.
78 float atlasMaxPathHeight_pow2 = (fallbackAAType == GrAAType::kMSAA)
79 ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
80 : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
Chris Daltonc3176002021-07-23 15:33:09 -060081 auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
82 float2 size = botRightCeil - topLeftFloor;
83 return // Ensure the path's largest dimension fits in the atlas.
84 skvx::all(size <= fAtlasMaxSize) &&
Chris Dalton66deeb22021-07-23 13:57:03 -060085 // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
86 // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
Chris Daltonc3176002021-07-23 15:33:09 -060087 // very wide and short.
Chris Dalton66deeb22021-07-23 13:57:03 -060088 size[0] * size[1] <= atlasMaxPathHeight_pow2;
Chris Daltonc3176002021-07-23 15:33:09 -060089}
90
91void GrAtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
92 using grvx::float2;
93 fPathGenID = path.getGenerationID();
94 fAffineMatrix[0] = m.getScaleX();
95 fAffineMatrix[1] = m.getSkewX();
96 fAffineMatrix[2] = m.getSkewY();
97 fAffineMatrix[3] = m.getScaleY();
98 float2 translate = {m.getTranslateX(), m.getTranslateY()};
99 float2 subpixelPosition = translate - skvx::floor(translate);
100 float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
101 GrTessellationShader::kLinearizationPrecision);
102 skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
103 fFillRule = (uint16_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
104}
105
// Finds or adds a coverage mask for the given path in the atlas.
//
// On success, returns true and fills in:
//   devIBounds        - the path's device bounds, rounded out to integers
//   locationInAtlas   - top-left corner of the path's mask within the atlas
//   transposedInAtlas - true if the mask is stored with width/height swapped (tall paths are
//                       transposed for better pow2 band packing)
//
// Returns false only when the current atlas is full and drawRefsAtlasCallback reports that the
// caller's draw already references it (a single draw can never reference two atlases).
bool GrAtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                         const SkMatrix& viewMatrix,
                                         const SkPath& path,
                                         const SkRect& pathDevBounds,
                                         SkIRect* devIBounds,
                                         SkIPoint16* locationInAtlas,
                                         bool* transposedInAtlas,
                                         const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
        // for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxSize);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            // Cache hit: reuse the mask already rendered at this location.
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
        // Register the new task so the drawing manager orders it after the old atlas task.
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        // The fresh atlas is empty, so adding a path that passed pathFitsInAtlas() cannot fail.
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        // Cached locations refer to the old atlas; they are all invalid now.
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}
190
191// Returns whether the given proxyOwner uses the atlasProxy.
192template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
193 bool refsAtlas = false;
194 auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) {
195 if (proxy == atlasProxy) {
196 refsAtlas = true;
197 }
198 };
199 if (proxyOwner) {
200 proxyOwner->visitProxies(checkForAtlasRef);
201 }
202 return refsAtlas;
203}
204
205GrPathRenderer::CanDrawPath GrAtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
206#ifdef SK_DEBUG
207 if (!fAtlasRenderTasks.empty()) {
208 // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
209 // intercepted a clip FP meant for a different op and will cause rendering artifacts.
210 const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
211 SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
212 SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
213 }
214 SkASSERT(!args.fHasUserStencilSettings); // See onGetStencilSupport().
215#endif
216 bool canDrawPath = args.fShape->style().isSimpleFill() &&
217 // The MSAA requirement is a temporary limitation in order to preserve
218 // functionality for refactoring. TODO: Allow kCoverage AA types.
219 args.fAAType == GrAAType::kMSAA &&
220 !args.fShape->style().hasPathEffect() &&
221 !args.fViewMatrix->hasPerspective() &&
Chris Dalton66deeb22021-07-23 13:57:03 -0600222 this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
223 args.fAAType);
Chris Daltonc3176002021-07-23 15:33:09 -0600224 return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
225}
226
227static bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
228 float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft);
229 float2 pathBotRight = float2::Load(&pathDevBounds.fRight);
230 // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false
231 // in the case of NaN.
232 if (!skvx::all(pathTopLeft < pathBotRight)) {
233 return false;
234 }
235 float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft));
236 float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight));
237 static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
238 return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft);
239}
240
// Renders the path's coverage mask into the atlas (via addPathToAtlas) and issues a
// GrDrawAtlasPathOp that samples it. Always returns true: onCanDrawPath() already vetted the
// path, so the atlas add cannot fail here.
bool GrAtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            // An invisible inverse fill covers the whole clip; draw it as a paint.
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // No DrawRefsAtlasCallback is needed: onCanDrawPath() asserted the paint doesn't reference
    // the current atlas, so replacing a full atlas is always safe for this draw.
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    // Inverse fills cover everything outside the path too, so the op must fill the whole clip
    // (or the whole render target if there is no clip); normal fills only need the path bounds.
    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                                            args.fSurfaceDrawContext->arenaAlloc(),
                                            fillBounds, *args.fViewMatrix,
                                            std::move(args.fPaint), locationInAtlas,
                                            devIBounds, transposedInAtlas,
                                            fAtlasRenderTasks.back()->readView(caps),
                                            args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
280
// Attempts to express a clip path as a fragment processor that modulates inputFP by the path's
// atlased coverage mask. On success returns GrFPSuccess with the new effect; on failure returns
// GrFPFailure and hands inputFP back unchanged so the caller can fall back to another clip
// method. Failure cases: perspective matrix, path too big for the atlas, or the atlas is full
// and can't be replaced. An invisible inverse-filled path is a trivial success (full coverage).
GrFPResult GrAtlasPathRenderer::makeAtlasClipEffect(const skgpu::v1::SurfaceDrawContext* sdc,
                                                    const GrOp* opBeingClipped,
                                                    std::unique_ptr<GrFragmentProcessor> inputFP,
                                                    const SkIRect& drawBounds,
                                                    const SkMatrix& viewMatrix,
                                                    const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed. Inverse fills have
        // full coverage everywhere in drawBounds, so that case succeeds with no extra effect.
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    // If the clip isn't atlased, the draw would fall back to MSAA when the target has (or can
    // use dynamic) MSAA, coverage AA otherwise.
    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    // Build the matrix that maps device coords to the mask's location in the atlas (swapping
    // x/y when the mask was stored transposed).
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}
346
#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
static void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* currTask = atlasTasks[i].get();
        GrAtlasRenderTask* prevTask = atlasTasks[i - 1].get();
        // Double check that currTask depends on every dependent of the atlas before it. If this
        // fires it might mean prevTask gained a new dependent after currTask came into service
        // (maybe by an op that hadn't yet been added to an opsTask when we registered the new
        // atlas with the drawingManager).
        for (GrRenderTask* priorUser : prevTask->dependents()) {
            SkASSERT(currTask->dependsOn(priorUser));
        }
    }
}
#endif
365
// Pre-flush hook: instantiates the backing texture for every atlas render task recorded this
// flush, then resets all per-flush atlas state. Same-sized atlases share the first atlas's
// physical texture (validate_atlas_dependencies() debug-checks that each atlas is fully out of
// service before the next renders, which is what makes sharing safe).
void GrAtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                   SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        // No atlases were recorded; the path cache must be empty too.
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            // Same dimensions as the first atlas: share its texture.
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}
399}