/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ops/GrAtlasPathRenderer.h"

#include "include/private/SkVx.h"
#include "src/core/SkIPoint16.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/GrVx.h"
#include "src/gpu/effects/GrModulateAtlasCoverageEffect.h"
#include "src/gpu/geometry/GrStyledShape.h"
#include "src/gpu/ops/GrDrawAtlasPathOp.h"
#include "src/gpu/tessellate/GrAtlasRenderTask.h"
#include "src/gpu/tessellate/GrTessellationPathRenderer.h"
#include "src/gpu/tessellate/shaders/GrTessellationShader.h"
#include "src/gpu/v1/SurfaceDrawContext_v1.h"

using grvx::float2;
using grvx::int2;

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path
// is guaranteed to be quite small. So if we transpose tall paths, then every path will have a
// small height, which lends itself very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;
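// (For illustration: a 20x700 path is stored transposed as 700x20, so it packs into a short pow2
// band -- SkNextPow2(20) = 32 -- rather than a 1024px-tall one.)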

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this
// landmine until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;

bool GrAtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    return rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
           caps.internalMultisampleCount(atlasFormat) > 1 &&
           // GrAtlasRenderTask currently requires tessellation. In the future it could use the
           // default path renderer when tessellation isn't available.
           GrTessellationPathRenderer::IsSupported(caps);
}

sk_sp<GrAtlasPathRenderer> GrAtlasPathRenderer::Make(GrRecordingContext* rContext) {
    return IsSupported(rContext)
            ? sk_sp<GrAtlasPathRenderer>(new GrAtlasPathRenderer(rContext->asDirectContext()))
            : nullptr;
}

GrAtlasPathRenderer::GrAtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}

// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
// boundaries.
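// (E.g., r = {1.2f, 3.7f, 5.1f, 6.0f} returns topLeftFloor = {1,3} and botRightCeil = {6,6}.)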
static std::tuple<float2,float2> round_out(const SkRect& r) {
    return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))};
}

bool GrAtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
                                          GrAAType fallbackAAType) const {
    SkASSERT(fallbackAAType != GrAAType::kNone);  // The atlas doesn't support non-AA.
    float atlasMaxPathHeight_pow2 = (fallbackAAType == GrAAType::kMSAA)
            ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
            : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    float2 size = botRightCeil - topLeftFloor;
    return // Ensure the path's largest dimension fits in the atlas.
           skvx::all(size <= fAtlasMaxPathWidth) &&
           // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
           // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
           // very wide and short.
           size[0] * size[1] <= atlasMaxPathHeight_pow2;
}

void GrAtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
    using grvx::float2;
    fPathGenID = path.getGenerationID();
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getSkewY();
    fAffineMatrix[3] = m.getScaleY();
    float2 translate = {m.getTranslateX(), m.getTranslateY()};
    float2 subpixelPosition = translate - skvx::floor(translate);
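    // E.g., translate = {10.3f, -2.6f} yields subpixelPosition = {0.3f, 0.4f}. Paths whose
    // fractional translations quantize differently below get distinct keys and atlas entries.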
    float2 subpixelPositionKey = skvx::trunc(subpixelPosition *
                                             GrTessellationShader::kLinearizationPrecision);
    skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey);
    fFillRule = (uint16_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
}

bool GrAtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                         const SkMatrix& viewMatrix,
                                         const SkPath& path,
                                         const SkRect& pathDevBounds,
                                         SkIRect* devIBounds,
                                         SkIPoint16* locationInAtlas,
                                         bool* transposedInAtlas,
                                         const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as
        // height for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
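    // (E.g., a 100x70 path rounds to the 128px band either way, so it is stored transposed --
    // 70 wide, 100 tall -- consuming less of the band's width. A 100x50 path rounds to 128 vs. 64,
    // so it stays 100x50 and lands in the cheaper 64px band.)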
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}

// Returns whether the given proxyOwner uses the atlasProxy.
template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
    bool refsAtlas = false;
    auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) {
        if (proxy == atlasProxy) {
            refsAtlas = true;
        }
    };
    if (proxyOwner) {
        proxyOwner->visitProxies(checkForAtlasRef);
    }
    return refsAtlas;
}

GrPathRenderer::CanDrawPath GrAtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}

static bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
    float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft);
    float2 pathBotRight = float2::Load(&pathDevBounds.fRight);
    // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return
    // false in the case of NaN.
    if (!skvx::all(pathTopLeft < pathBotRight)) {
        return false;
    }
    float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft));
    float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight));
    static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
    return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft);
}

bool GrAtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                                            args.fSurfaceDrawContext->arenaAlloc(),
                                            fillBounds, *args.fViewMatrix,
                                            std::move(args.fPaint), locationInAtlas,
                                            devIBounds, transposedInAtlas,
                                            fAtlasRenderTasks.back()->readView(caps),
                                            args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

GrFPResult GrAtlasPathRenderer::makeAtlasClipEffect(const skgpu::v1::SurfaceDrawContext* sdc,
                                                    const GrOp* opBeingClipped,
                                                    std::unique_ptr<GrFragmentProcessor> inputFP,
                                                    const SkIRect& drawBounds,
                                                    const SkMatrix& viewMatrix,
                                                    const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed.
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
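        // Transposed: swap the axes so that devIBounds.topLeft() maps to (atlasX, atlasY); a
        // device point (x,y) lands at atlas coords (y - devIBounds.top() + atlasX,
        //                                           x - devIBounds.left() + atlasY).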
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips,
        // as opposed to us having to check the path bounds. Feel free to remove this assert if
        // that ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}

#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at
// a time and that they can all share the same texture.
static void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* atlasTask = atlasTasks[i].get();
        GrAtlasRenderTask* previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif

void GrAtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                   SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}