Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2019 Google LLC. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 8 | #include "src/gpu/ops/AtlasPathRenderer.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 9 | |
| 10 | #include "include/private/SkVx.h" |
| 11 | #include "src/core/SkIPoint16.h" |
| 12 | #include "src/gpu/GrClip.h" |
| 13 | #include "src/gpu/GrDirectContextPriv.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 14 | #include "src/gpu/GrVx.h" |
| 15 | #include "src/gpu/effects/GrModulateAtlasCoverageEffect.h" |
| 16 | #include "src/gpu/geometry/GrStyledShape.h" |
| 17 | #include "src/gpu/ops/GrDrawAtlasPathOp.h" |
| 18 | #include "src/gpu/tessellate/GrAtlasRenderTask.h" |
| 19 | #include "src/gpu/tessellate/GrTessellationPathRenderer.h" |
| 20 | #include "src/gpu/tessellate/shaders/GrTessellationShader.h" |
Robert Phillips | 4dca831 | 2021-07-28 15:13:20 -0400 | [diff] [blame] | 21 | #include "src/gpu/v1/SurfaceDrawContext_v1.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 22 | |
| 23 | using grvx::float2; |
| 24 | using grvx::int2; |
| 25 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 26 | namespace { |
| 27 | |
// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
// boundaries.
// NOTE(review): assumes SkRect's fields are laid out {fLeft, fTop, fRight, fBottom} so that
// float2::Load(&r.fLeft) reads the top-left pair and float2::Load(&r.fRight) reads the
// bottom-right pair -- confirm against SkRect if that layout ever changes.
std::tuple<float2,float2> round_out(const SkRect& r) {
    return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))};
}
| 33 | |
| 34 | // Returns whether the given proxyOwner uses the atlasProxy. |
| 35 | template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) { |
| 36 | bool refsAtlas = false; |
| 37 | auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) { |
| 38 | if (proxy == atlasProxy) { |
| 39 | refsAtlas = true; |
| 40 | } |
| 41 | }; |
| 42 | if (proxyOwner) { |
| 43 | proxyOwner->visitProxies(checkForAtlasRef); |
| 44 | } |
| 45 | return refsAtlas; |
| 46 | } |
| 47 | |
| 48 | bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) { |
| 49 | float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft); |
| 50 | float2 pathBotRight = float2::Load(&pathDevBounds.fRight); |
| 51 | // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false |
| 52 | // in the case of NaN. |
| 53 | if (!skvx::all(pathTopLeft < pathBotRight)) { |
| 54 | return false; |
| 55 | } |
| 56 | float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft)); |
| 57 | float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight)); |
| 58 | static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight)); |
| 59 | return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft); |
| 60 | } |
| 61 | |
#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
void validate_atlas_dependencies(const SkTArray<sk_sp<GrAtlasRenderTask>>& atlasTasks) {
    // Walk newest-to-oldest; each atlas task must depend on every user of its predecessor.
    for (int i = atlasTasks.count() - 1; i >= 1; --i) {
        GrAtlasRenderTask* atlasTask = atlasTasks[i].get();
        GrAtlasRenderTask* previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif
| 80 | |
| 81 | } // anonymous namespace |
| 82 | |
| 83 | namespace skgpu::v1 { |
| 84 | |
// The atlas stores single-channel (alpha-8) coverage masks and starts at 512x512, growing as
// needed up to the max size computed in the constructor.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this landmine
// until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;
| 104 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 105 | bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) { |
Chris Dalton | 7893d2d | 2021-08-03 17:42:58 -0600 | [diff] [blame] | 106 | #ifdef SK_BUILD_FOR_IOS |
| 107 | // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can |
| 108 | // investigate. |
| 109 | if (rContext->backend() == GrBackendApi::kOpenGL) { |
| 110 | return false; |
| 111 | } |
| 112 | #endif |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 113 | const GrCaps& caps = *rContext->priv().caps(); |
| 114 | auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes); |
| 115 | return rContext->asDirectContext() && // The atlas doesn't support DDL yet. |
| 116 | caps.internalMultisampleCount(atlasFormat) > 1 && |
| 117 | // GrAtlasRenderTask currently requires tessellation. In the future it could use the |
| 118 | // default path renderer when tessellation isn't available. |
| 119 | GrTessellationPathRenderer::IsSupported(caps); |
| 120 | } |
| 121 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 122 | sk_sp<AtlasPathRenderer> AtlasPathRenderer::Make(GrRecordingContext* rContext) { |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 123 | return IsSupported(rContext) |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 124 | ? sk_sp<AtlasPathRenderer>(new AtlasPathRenderer(rContext->asDirectContext())) |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 125 | : nullptr; |
| 126 | } |
| 127 | |
// Computes the atlas size limits: the max atlas dimension (pow2, clamped to the caps' preferred
// render target size), the max path width we will atlas, and the atlas's starting dimensions.
AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    // Tests may override the atlas size cap via context options.
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    // Clamp to the hardware's preferred render target size, then snap down to a power of two.
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    // The per-path width cap can never exceed the atlas itself.
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}
| 140 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 141 | bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds, |
| 142 | GrAAType fallbackAAType) const { |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 143 | SkASSERT(fallbackAAType != GrAAType::kNone); // The atlas doesn't support non-AA. |
| 144 | float atlasMaxPathHeight_pow2 = (fallbackAAType == GrAAType::kMSAA) |
| 145 | ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback |
| 146 | : kAtlasMaxPathHeight * kAtlasMaxPathHeight; |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 147 | auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds); |
| 148 | float2 size = botRightCeil - topLeftFloor; |
| 149 | return // Ensure the path's largest dimension fits in the atlas. |
Chris Dalton | 72fd33a | 2021-07-28 14:19:13 -0600 | [diff] [blame] | 150 | skvx::all(size <= fAtlasMaxPathWidth) && |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 151 | // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels |
| 152 | // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 153 | // very wide and short. |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 154 | size[0] * size[1] <= atlasMaxPathHeight_pow2; |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 155 | } |
| 156 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 157 | void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) { |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 158 | using grvx::float2; |
| 159 | fPathGenID = path.getGenerationID(); |
| 160 | fAffineMatrix[0] = m.getScaleX(); |
| 161 | fAffineMatrix[1] = m.getSkewX(); |
| 162 | fAffineMatrix[2] = m.getSkewY(); |
| 163 | fAffineMatrix[3] = m.getScaleY(); |
| 164 | float2 translate = {m.getTranslateX(), m.getTranslateY()}; |
| 165 | float2 subpixelPosition = translate - skvx::floor(translate); |
| 166 | float2 subpixelPositionKey = skvx::trunc(subpixelPosition * |
| 167 | GrTessellationShader::kLinearizationPrecision); |
| 168 | skvx::cast<uint8_t>(subpixelPositionKey).store(fSubpixelPositionKey); |
| 169 | fFillRule = (uint16_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID. |
| 170 | } |
| 171 | |
// Adds the path's mask to the current atlas, starting a new atlas if the current one is full.
// On success, fills out devIBounds (path bounds rounded out), locationInAtlas, and
// transposedInAtlas (whether width/height were swapped for packing). Non-volatile paths are
// deduped through fAtlasPathCache. Returns false only when the current atlas is full AND
// drawRefsAtlasCallback reports the draw already references it (a draw can never reference two
// atlases, so we cannot swap in a new one).
bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                       const SkMatrix& viewMatrix,
                                       const SkPath& path,
                                       const SkRect& pathDevBounds,
                                       SkIRect* devIBounds,
                                       SkIPoint16* locationInAtlas,
                                       bool* transposedInAtlas,
                                       const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
        // for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        GrAtlasRenderTask* currentAtlasTask = (!fAtlasRenderTasks.empty())
                ? fAtlasRenderTasks.back().get() : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<GrAtlasRenderTask>(rContext,
                                                          sk_make_sp<GrArenas>(),
                                                          std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        // The path must fit in a brand new, empty atlas.
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        // Cached locations refer to the previous atlas, so they are all stale now.
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}
| 256 | |
// Accepts only simple, antialiased fills with no path effect or perspective whose device-space
// bounds are small enough for the atlas; everything else is declined.
GrPathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
#ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
#else
                       args.fAAType != GrAAType::kNone &&
#endif
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}
| 282 | |
// Renders the path's coverage mask into the atlas and issues a GrDrawAtlasPathOp that samples
// it. Always returns true: onCanDrawPath() already guaranteed the path is atlasable.
bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        // An inverse fill still covers everything outside the (invisible) path.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Cannot fail: onCanDrawPath() declined any draw that might already ref the atlas.
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    // Inverse fills draw everywhere (clip or whole backing store); regular fills draw only
    // within the path's device bounds.
    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<GrDrawAtlasPathOp>(args.fContext,
                                            args.fSurfaceDrawContext->arenaAlloc(),
                                            fillBounds, *args.fViewMatrix,
                                            std::move(args.fPaint), locationInAtlas,
                                            devIBounds, transposedInAtlas,
                                            fAtlasRenderTasks.back()->readView(caps),
                                            args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
| 322 | |
// Builds a fragment processor that modulates inputFP's coverage by an atlased clip path's
// coverage mask. Returns GrFPFailure (with inputFP returned unchanged) when the clip cannot be
// expressed through the atlas -- perspective, a too-large path, a visible non-inverse miss, or
// an out-of-room atlas -- so the caller can fall back to another clipping method.
GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
                                                  const GrOp* opBeingClipped,
                                                  std::unique_ptr<GrFragmentProcessor> inputFP,
                                                  const SkIRect& drawBounds,
                                                  const SkMatrix& viewMatrix,
                                                  const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed.
        // Inverse fill: everything in drawBounds is inside the clip -- pass inputFP through.
        // Regular fill: nothing is inside the clip -- fail so the caller handles it.
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    // Pick the AA type this clip would use if it skipped the atlas, to size-limit the path.
    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    // Map device space to the path's location in the atlas, swapping x/y if it was transposed.
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}
| 388 | |
// Flush-time hook: instantiates every pending atlas render task's backing texture. Because the
// dependency setup guarantees only one atlas is in service at a time, all full-size atlases can
// share the first atlas's texture. Clears all atlas bookkeeping afterward.
void AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                 SkSpan<const uint32_t> /* taskIDs */) {
    if (fAtlasRenderTasks.empty()) {
        // No atlases were created, so no paths can be cached either.
        SkASSERT(fAtlasPathCache.count() == 0);
        return;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    // Instantiate the first atlas.
    fAtlasRenderTasks[0]->instantiate(onFlushRP);

    // Instantiate the remaining atlases.
    GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
    SkASSERT(firstAtlasTexture);
    for (int i = 1; i < fAtlasRenderTasks.count(); ++i) {
        GrAtlasRenderTask* atlasTask = fAtlasRenderTasks[i].get();
        if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) {
            // Same dimensions as the first atlas: reuse its texture.
            atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture));
        } else {
            // The atlases are expected to all be full size except possibly the final one.
            SkASSERT(i == fAtlasRenderTasks.count() - 1);
            SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                     firstAtlasTexture->dimensions().area());
            // TODO: Recycle the larger atlas texture anyway?
            atlasTask->instantiate(onFlushRP);
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.reset();
    fAtlasPathCache.reset();
}
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame^] | 423 | |
| 424 | } // namespace skgpu::v1 |