/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ops/GrAtlasPathRenderer.h"

#include "include/private/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/GrVx.h"
#include "src/gpu/effects/GrModulateAtlasCoverageEffect.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrAtlasRenderTask.h"
#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
#include "src/gpu/tessellate/shaders/GrTessellationShader.h"

using grvx::float2;
using grvx::int2;

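// NOTE: the atlas stores alpha-8 coverage masks. It starts out at kAtlasInitialSize and may grow
// as needed, up to fAtlasMaxSize (both values are snapped to powers of two in the constructor).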
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 128px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 128;

bool GrAtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    return rContext->asDirectContext() && // The atlas doesn't support DDL yet.
           caps.internalMultisampleCount(atlasFormat) > 1 &&
           // GrAtlasRenderTask currently requires tessellation. In the future it could use the
           // default path renderer when tessellation isn't available.
           GrTessellationPathRenderer::IsSupported(caps);
}

sk_sp<GrAtlasPathRenderer> GrAtlasPathRenderer::Make(GrRecordingContext* rContext) {
    return IsSupported(rContext)
            ? sk_sp<GrAtlasPathRenderer>(new GrAtlasPathRenderer(rContext->asDirectContext()))
            : nullptr;
}

GrAtlasPathRenderer::GrAtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
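    // Clamp the atlas sizes to the device's preferred render target size and snap them to powers
    // of two so they cooperate with the pow2 rectanizer (max rounds down, initial rounds up).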
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}

// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
// boundaries.
static std::tuple<float2,float2> round_out(const SkRect& r) {
    return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))};
}

bool GrAtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds) const {
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    float2 size = botRightCeil - topLeftFloor;
    return // Ensure the path's largest dimension fits in the atlas.
           skvx::all(size <= fAtlasMaxSize) &&
           // Since we will transpose tall skinny paths, limiting to kAtlasMaxPathHeight^2 pixels
           // guarantees heightInAtlas <= kAtlasMaxPathHeight, while also allowing paths that are
           // very wide and short.
           size[0] * size[1] <= kAtlasMaxPathHeight * kAtlasMaxPathHeight;
}

void GrAtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
    using grvx::float2;
    fPathGenID = path.getGenerationID();
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getSkewY();
    fAffineMatrix[3] = m.getScaleY();
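    // The key also encodes the view matrix's subpixel translation, quantized to the tessellator's
    // linearization precision. Whole-pixel translation shouldn't matter (the mask just lands at a
    // different devIBounds), but a different subpixel phase produces different coverage.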
    float2 translate = {m.getTranslateX(), m.getTranslateY()};
    float2 subpixelPosition = translate - skvx::floor(translate);
    float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
                                             GrTessellationShader::kLinearizationPrecision);
    skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
    fFillRule = (uint16_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
}

bool GrAtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                         const SkMatrix& viewMatrix,
                                         const SkPath& path,
                                         const SkRect& pathDevBounds,
                                         SkIRect* devIBounds,
                                         SkIPoint16* locationInAtlas,
                                         bool* transposedInAtlas,
                                         const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective()); // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

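    // Decide whether to store the path transposed. As an illustrative example: a 40x90 path rounds
    // to different pow2 bands (64 vs. 128), so transposing it and using 40 as the height lands it
    // in the shorter band; a 100x120 path rounds to the 128 band either way, so using the larger
    // dimension (120) as the height wastes less room within that band.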
    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as
        // height for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    SkASSERT(widthInAtlas <= (int)fAtlasMaxSize);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
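        // Passing currentAtlasTask here is (presumably) what serializes the atlases, so only one
        // is ever in service at a time and they can all share a single backing texture. See
        // validate_atlas_dependencies() below.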
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}

// Returns whether the given proxyOwner uses the atlasProxy.
template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
    bool refsAtlas = false;
    auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) {
        if (proxy == atlasProxy) {
            refsAtlas = true;
        }
    };
    if (proxyOwner) {
        proxyOwner->visitProxies(checkForAtlasRef);
    }
    return refsAtlas;
}

GrPathRenderer::CanDrawPath GrAtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op, which would cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings); // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()));
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}

static bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
    float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft);
    float2 pathBotRight = float2::Load(&pathDevBounds.fRight);
    // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return
    // false in the case of NaN.
    if (!skvx::all(pathTopLeft < pathBotRight)) {
        return false;
    }
    float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft));
    float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight));
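    // The static_assert below sanity-checks that the two 2-lane loads above span the entire
    // SkIRect.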
    static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
    return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft);
}

bool GrAtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

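    // A regular fill only needs to cover the path's device bounds, but an inverse fill affects
    // everything inside the clip (or the entire render target if there is no clip), so the op's
    // bounds must cover that larger area.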
    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                                            args.fSurfaceDrawContext->arenaAlloc(),
                                            fillBounds, *args.fViewMatrix,
                                            std::move(args.fPaint), locationInAtlas,
                                            devIBounds, transposedInAtlas,
                                            fAtlasRenderTasks.back()->readView(caps),
                                            args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

GrFPResult GrAtlasPathRenderer::makeAtlasClipEffect(GrRecordingContext* rContext,
                                                    const GrOp* opBeingClipped,
                                                    std::unique_ptr<GrFragmentProcessor> inputFP,
                                                    const SkIRect& drawBounds,
                                                    const SkMatrix& viewMatrix,
                                                    const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed.
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    if (!this->pathFitsInAtlas(pathDevBounds)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(rContext, viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
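    // Build the matrix that maps device-space coords to this path's location in the atlas. If the
    // path was stored transposed, the x and y axes are swapped in addition to being translated.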
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips,
        // as opposed to us having to check the path bounds. Feel free to remove this assert if
        // that ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*rContext->priv().caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}

#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
static void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* atlasTask = atlasTasks[i].get();
        GrAtlasRenderTask* previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif

void GrAtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                   SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}