Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2019 Google LLC. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 8 | #include "src/gpu/ops/AtlasPathRenderer.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 9 | |
| 10 | #include "include/private/SkVx.h" |
| 11 | #include "src/core/SkIPoint16.h" |
| 12 | #include "src/gpu/GrClip.h" |
| 13 | #include "src/gpu/GrDirectContextPriv.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 14 | #include "src/gpu/GrVx.h" |
| 15 | #include "src/gpu/effects/GrModulateAtlasCoverageEffect.h" |
| 16 | #include "src/gpu/geometry/GrStyledShape.h" |
Robert Phillips | fdafc0c | 2021-08-25 16:39:14 -0400 | [diff] [blame] | 17 | #include "src/gpu/ops/AtlasRenderTask.h" |
Robert Phillips | 769b488 | 2021-09-07 16:48:46 -0400 | [diff] [blame] | 18 | #include "src/gpu/ops/DrawAtlasPathOp.h" |
Robert Phillips | e453fa0 | 2021-08-19 14:57:05 -0400 | [diff] [blame] | 19 | #include "src/gpu/ops/TessellationPathRenderer.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 20 | #include "src/gpu/tessellate/shaders/GrTessellationShader.h" |
Robert Phillips | 4dca831 | 2021-07-28 15:13:20 -0400 | [diff] [blame] | 21 | #include "src/gpu/v1/SurfaceDrawContext_v1.h" |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 22 | |
| 23 | using grvx::float2; |
| 24 | using grvx::int2; |
| 25 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 26 | namespace { |
| 27 | |
| 28 | // Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer |
| 29 | // boundaries. |
| 30 | std::tuple<float2,float2> round_out(const SkRect& r) { |
| 31 | return {skvx::floor(float2::Load(&r.fLeft)), skvx::ceil(float2::Load(&r.fRight))}; |
| 32 | } |
| 33 | |
| 34 | // Returns whether the given proxyOwner uses the atlasProxy. |
| 35 | template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) { |
| 36 | bool refsAtlas = false; |
| 37 | auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, GrMipmapped) { |
| 38 | if (proxy == atlasProxy) { |
| 39 | refsAtlas = true; |
| 40 | } |
| 41 | }; |
| 42 | if (proxyOwner) { |
| 43 | proxyOwner->visitProxies(checkForAtlasRef); |
| 44 | } |
| 45 | return refsAtlas; |
| 46 | } |
| 47 | |
| 48 | bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) { |
| 49 | float2 pathTopLeft = float2::Load(&pathDevBounds.fLeft); |
| 50 | float2 pathBotRight = float2::Load(&pathDevBounds.fRight); |
| 51 | // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false |
| 52 | // in the case of NaN. |
| 53 | if (!skvx::all(pathTopLeft < pathBotRight)) { |
| 54 | return false; |
| 55 | } |
| 56 | float2 clipTopLeft = skvx::cast<float>(int2::Load(&clipBounds.fLeft)); |
| 57 | float2 clipBotRight = skvx::cast<float>(int2::Load(&clipBounds.fRight)); |
| 58 | static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight)); |
| 59 | return skvx::all(pathTopLeft < clipBotRight) && skvx::all(pathBotRight > clipTopLeft); |
| 60 | } |
| 61 | |
#ifdef SK_DEBUG
// Asserts that the atlas dependencies are set up such that each atlas will be totally out of
// service before we render the next one in line. That invariant is what lets all atlases share a
// single backing texture (only one is ever active at a time).
void validate_atlas_dependencies(const SkTArray<sk_sp<skgpu::v1::AtlasRenderTask>>& atlasTasks) {
    for (int idx = atlasTasks.count() - 1; idx >= 1; --idx) {
        auto currTask = atlasTasks[idx].get();
        auto prevTask = atlasTasks[idx - 1].get();
        // currTask must depend on every dependent of its predecessor. If this fires, prevTask may
        // have gained a new dependent after currTask came into service (maybe by an op that hadn't
        // yet been added to an opsTask when we registered the new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : prevTask->dependents()) {
            SkASSERT(currTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif
| 80 | |
| 81 | } // anonymous namespace |
| 82 | |
| 83 | namespace skgpu::v1 { |
| 84 | |
// Pixel format and starting dimensions for the path atlas texture.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this landmine
// until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;
| 104 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 105 | bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) { |
Chris Dalton | 7893d2d | 2021-08-03 17:42:58 -0600 | [diff] [blame] | 106 | #ifdef SK_BUILD_FOR_IOS |
| 107 | // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can |
| 108 | // investigate. |
| 109 | if (rContext->backend() == GrBackendApi::kOpenGL) { |
| 110 | return false; |
| 111 | } |
| 112 | #endif |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 113 | const GrCaps& caps = *rContext->priv().caps(); |
| 114 | auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes); |
| 115 | return rContext->asDirectContext() && // The atlas doesn't support DDL yet. |
| 116 | caps.internalMultisampleCount(atlasFormat) > 1 && |
| 117 | // GrAtlasRenderTask currently requires tessellation. In the future it could use the |
| 118 | // default path renderer when tessellation isn't available. |
Robert Phillips | 24d622d | 2021-08-19 17:04:05 -0400 | [diff] [blame] | 119 | TessellationPathRenderer::IsSupported(caps); |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 120 | } |
| 121 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 122 | sk_sp<AtlasPathRenderer> AtlasPathRenderer::Make(GrRecordingContext* rContext) { |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 123 | return IsSupported(rContext) |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 124 | ? sk_sp<AtlasPathRenderer>(new AtlasPathRenderer(rContext->asDirectContext())) |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 125 | : nullptr; |
| 126 | } |
| 127 | |
// Computes the atlas size limits from the caps (and, in test builds, the context options).
AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if GR_TEST_UTILS
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    // Clamp to the GPU's preferred render target size, then snap down to a power of two.
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    // An individual path can never be wider than the atlas itself.
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}
| 140 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 141 | bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds, |
| 142 | GrAAType fallbackAAType) const { |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 143 | SkASSERT(fallbackAAType != GrAAType::kNone); // The atlas doesn't support non-AA. |
| 144 | float atlasMaxPathHeight_pow2 = (fallbackAAType == GrAAType::kMSAA) |
| 145 | ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback |
| 146 | : kAtlasMaxPathHeight * kAtlasMaxPathHeight; |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 147 | auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds); |
| 148 | float2 size = botRightCeil - topLeftFloor; |
| 149 | return // Ensure the path's largest dimension fits in the atlas. |
Chris Dalton | 72fd33a | 2021-07-28 14:19:13 -0600 | [diff] [blame] | 150 | skvx::all(size <= fAtlasMaxPathWidth) && |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 151 | // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels |
| 152 | // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 153 | // very wide and short. |
Chris Dalton | 66deeb2 | 2021-07-23 13:57:03 -0600 | [diff] [blame] | 154 | size[0] * size[1] <= atlasMaxPathHeight_pow2; |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 155 | } |
| 156 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 157 | void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) { |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 158 | using grvx::float2; |
| 159 | fPathGenID = path.getGenerationID(); |
| 160 | fAffineMatrix[0] = m.getScaleX(); |
| 161 | fAffineMatrix[1] = m.getSkewX(); |
Chris Dalton | 7311f9a | 2021-10-05 13:31:16 -0600 | [diff] [blame^] | 162 | fAffineMatrix[2] = m.getTranslateX(); |
| 163 | fAffineMatrix[3] = m.getSkewY(); |
| 164 | fAffineMatrix[4] = m.getScaleY(); |
| 165 | fAffineMatrix[5] = m.getTranslateY(); |
| 166 | fFillRule = (uint32_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID. |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 167 | } |
| 168 | |
// Adds the path to the current atlas (replacing the atlas with a new one if it is full). On
// success, writes out the path's rounded-out integer device bounds, its location in the atlas, and
// whether it was transposed. Returns false only when the current atlas is full AND
// drawRefsAtlasCallback reports the draw already references it (so a new atlas can't be started).
bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                       const SkMatrix& viewMatrix,
                                       const SkPath& path,
                                       const SkRect& pathDevBounds,
                                       SkIRect* devIBounds,
                                       SkIPoint16* locationInAtlas,
                                       bool* transposedInAtlas,
                                       const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(skvx::all(skvx::cast<float>(int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
        // for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            // Cache hit: reuse the previously-atlased location.
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        auto currentAtlasTask = (!fAtlasRenderTasks.empty()) ? fAtlasRenderTasks.back().get()
                                                             : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<AtlasRenderTask>(rContext,
                                                        sk_make_sp<GrArenas>(),
                                                        std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        // A brand-new atlas must have room for the path; pathFitsInAtlas() bounded its size.
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        // Cached locations refer to the old atlas, so they are all invalid now.
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}
| 253 | |
// Reports whether this renderer can handle the draw: only simple antialiased fills, with no path
// effect and no perspective, whose device bounds fit within the atlas limits.
PathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
#ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
#else
                       args.fAAType != GrAAType::kNone &&
#endif
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}
| 279 | |
// Draws the path by rendering its coverage into the atlas and issuing a DrawAtlasPathOp that
// samples that coverage. Always returns true (onCanDrawPath() already vetted the draw).
bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            // An inverse fill with no visible interior still paints everywhere inside the clip.
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Cannot fail here: onCanDrawPath() rules out draws that already ref the atlas.
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    // Inverse fills cover the whole clip (or the full render target when there is no clip);
    // regular fills only cover the path's device bounds.
    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<DrawAtlasPathOp>(args.fContext,
                                          args.fSurfaceDrawContext->arenaAlloc(),
                                          fillBounds, *args.fViewMatrix,
                                          std::move(args.fPaint), locationInAtlas,
                                          devIBounds, transposedInAtlas,
                                          fAtlasRenderTasks.back()->readView(caps),
                                          args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}
| 319 | |
// Creates a GrModulateAtlasCoverageEffect that clips inputFP by the path's atlased coverage.
// Returns GrFPFailure (with inputFP passed back unmodified) when the path can't be atlased:
// perspective matrix, path too big, or the atlas is full and this draw already references it.
// An invisible non-inverse path also fails (nothing would be drawn); an invisible inverse path
// trivially succeeds with inputFP unmodified.
GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
                                                  const GrOp* opBeingClipped,
                                                  std::unique_ptr<GrFragmentProcessor> inputFP,
                                                  const SkIRect& drawBounds,
                                                  const SkMatrix& viewMatrix,
                                                  const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed.
        return path.isInverseFillType() ? GrFPSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    // Decide which AA type the caller would fall back to if we refuse, which in turn selects the
    // (stricter, with MSAA) path-size limit.
    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

    // Build the matrix mapping device space to the path's location in the atlas, swapping x and y
    // if the path was stored transposed.
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        // Inverse fills sample the atlas coverage inverted.
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}
| 385 | |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 386 | void AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP, |
| 387 | SkSpan<const uint32_t> /* taskIDs */) { |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 388 | if (fAtlasRenderTasks.empty()) { |
| 389 | SkASSERT(fAtlasPathCache.count() == 0); |
| 390 | return; |
| 391 | } |
| 392 | |
| 393 | // Verify the atlases can all share the same texture. |
| 394 | SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);) |
| 395 | |
| 396 | // Instantiate the first atlas. |
| 397 | fAtlasRenderTasks[0]->instantiate(onFlushRP); |
| 398 | |
| 399 | // Instantiate the remaining atlases. |
| 400 | GrTexture* firstAtlasTexture = fAtlasRenderTasks[0]->atlasProxy()->peekTexture(); |
| 401 | SkASSERT(firstAtlasTexture); |
| 402 | for (int i = 1; i < fAtlasRenderTasks.count(); ++i) { |
Robert Phillips | fdafc0c | 2021-08-25 16:39:14 -0400 | [diff] [blame] | 403 | auto atlasTask = fAtlasRenderTasks[i].get(); |
Chris Dalton | c317600 | 2021-07-23 15:33:09 -0600 | [diff] [blame] | 404 | if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlasTexture->dimensions()) { |
| 405 | atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlasTexture)); |
| 406 | } else { |
| 407 | // The atlases are expected to all be full size except possibly the final one. |
| 408 | SkASSERT(i == fAtlasRenderTasks.count() - 1); |
| 409 | SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() < |
| 410 | firstAtlasTexture->dimensions().area()); |
| 411 | // TODO: Recycle the larger atlas texture anyway? |
| 412 | atlasTask->instantiate(onFlushRP); |
| 413 | } |
| 414 | } |
| 415 | |
| 416 | // Reset all atlas data. |
| 417 | fAtlasRenderTasks.reset(); |
| 418 | fAtlasPathCache.reset(); |
| 419 | } |
Robert Phillips | 43e70f1 | 2021-08-19 11:12:48 -0400 | [diff] [blame] | 420 | |
| 421 | } // namespace skgpu::v1 |