/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrSurfaceDrawContext.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"
#include "src/gpu/ccpr/GrCCDrawPathsOp.h"
#include "src/gpu/ccpr/GrCCPathCache.h"

using PathInstance = GrCCPathProcessor::Instance;

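// Reports whether CCPR can run with these caps and, if so, which coverage mode it would use:
// fp16 coverage counts or A8 multisample.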
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps, CoverageType* coverageType) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverDisableCCPR() || !shaderCaps.integerSupport() ||
        !caps.drawInstancedSupport() || !shaderCaps.floatIs32Bits() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

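    // Prefer the fp16 coverage-count mode when coverage counting is allowed and a renderable
    // alpha-f16 format exists; otherwise fall back to the A8 multisample mode checked below.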
    GrBackendFormat defaultAHalfFormat = caps.getDefaultBackendFormat(GrColorType::kAlpha_F16,
                                                                      GrRenderable::kYes);
    if (caps.allowCoverageCounting() &&
        defaultAHalfFormat.isValid()) { // This checks both texturable and renderable
        if (coverageType) {
            *coverageType = CoverageType::kFP16_CoverageCount;
        }
        return true;
    }

    if (!caps.driverDisableMSAACCPR() &&
        caps.internalMultisampleCount(defaultA8Format) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleMaskSupport()) {
        if (coverageType) {
            *coverageType = CoverageType::kA8_Multisample;
        }
        return true;
    }

    return false;
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching, uint32_t contextUniqueID) {
    CoverageType coverageType;
    if (IsSupported(caps, &coverageType)) {
        return sk_sp<GrCoverageCountingPathRenderer>(new GrCoverageCountingPathRenderer(
                coverageType, allowCaching, contextUniqueID));
    }
    return nullptr;
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(
        CoverageType coverageType, AllowCaching allowCaching, uint32_t contextUniqueID)
        : fCoverageType(coverageType) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = std::make_unique<GrCCPathCache>(contextUniqueID);
    }
}

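// Returns the GrCCPerOpsTaskPaths object for the given opsTask, creating one on first use.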
GrCCPerOpsTaskPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opsTaskID) {
    auto it = fPendingPaths.find(opsTaskID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpsTaskPaths> paths = sk_make_sp<GrCCPerOpsTaskPaths>();
        it = fPendingPaths.insert(std::make_pair(opsTaskID, std::move(paths))).first;
    }
    return it->second.get();
}

GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
#if 1
    // The atlas takes up too much memory. We should focus on other path renderers instead.
    return CanDrawPath::kNo;
#else
    const GrStyledShape& shape = *args.fShape;
    // We use "kCoverage", or analytic AA, no matter what the coverage type of our atlas: Even if
    // the atlas is multisampled, that resolves into analytic coverage before we draw the path to
    // the main canvas.
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    const SkStrokeRec& stroke = shape.style().strokeRec();
    switch (stroke.getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this before
                // doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the SW
                // renderer have this one: It will probably be faster and a bitmap will require less
                // total memory on the GPU than CCPR instance buffers would for the raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we commit
                // to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke lines
                // themselves. This limitation doesn't affect hairlines since their stroke lines
                // are defined relative to device space.
                return CanDrawPath::kNo;
            }
            [[fallthrough]];
        case SkStrokeRec::kHairline_Style: {
            if (CoverageType::kFP16_CoverageCount != fCoverageType) {
                // Stroking is not yet supported in MSAA atlas mode.
                return CanDrawPath::kNo;
            }
            float inflationRadius;
            GetStrokeDevWidth(*args.fViewMatrix, stroke, &inflationRadius);
            if (!(inflationRadius <= kMaxBoundsInflationFromStroke)) {
                // Let extremely wide strokes be converted to fill paths and drawn by the CCPR
                // filler instead. (Cast the logic negatively in order to also catch r=NaN.)
                return CanDrawPath::kNo;
            }
            SkASSERT(!SkScalarIsNaN(inflationRadius));
            if (SkPathPriv::ConicWeightCnt(path)) {
                // The stroker does not support conics yet.
                return CanDrawPath::kNo;
            }
            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
#endif
}

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    auto op = GrCCDrawPathsOp::Make(args.fContext, *args.fClipConservativeBounds, *args.fViewMatrix,
                                    *args.fShape, std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

void GrCoverageCountingPathRenderer::recordOp(GrOp::Owner op,
                                              const DrawPathArgs& args) {
    if (op) {
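        // When the op is added to an opsTask, this callback registers it with that opsTask's
        // pending CCPR paths so the path data gets picked up at flush time.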
        auto addToOwningPerOpsTaskPaths = [this](GrOp* op, uint32_t opsTaskID) {
            op->cast<GrCCDrawPathsOp>()->addToOwningPerOpsTaskPaths(
                    sk_ref_sp(this->lookupPendingPaths(opsTaskID)));
        };
        args.fRenderTargetContext->addDrawOp(args.fClip, std::move(op),
                                             addToOwningPerOpsTaskPaths);
    }
}

std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        std::unique_ptr<GrFragmentProcessor> inputFP, uint32_t opsTaskID,
        const SkPath& deviceSpacePath, const SkIRect& accessRect, const GrCaps& caps) {
#ifdef SK_DEBUG
    SkASSERT(!fFlushing);
    SkIRect pathIBounds;
    deviceSpacePath.getBounds().roundOut(&pathIBounds);
    SkIRect maskBounds;
    if (maskBounds.intersect(accessRect, pathIBounds)) {
        SkASSERT(maskBounds.height64() * maskBounds.width64() <= kMaxClipPathArea);
    }
#endif

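    // Key the clip path on the SkPath's generation ID so repeated clips with the same path within
    // one opsTask share a single atlas entry.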
    uint32_t key = deviceSpacePath.getGenerationID();
    if (CoverageType::kA8_Multisample == fCoverageType) {
        // We only need to consider fill rule in MSAA mode. In coverage count mode Even/Odd and
        // Nonzero both reference the same coverage count mask.
        key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
    }
    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opsTaskID)->fClipPaths[key];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (std::max(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, fCoverageType, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, fCoverageType, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    auto isCoverageCount = GrCCClipProcessor::IsCoverageCount(
            CoverageType::kFP16_CoverageCount == fCoverageType);
    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath.pathDevIBounds().contains(accessRect));
    return std::make_unique<GrCCClipProcessor>(
            std::move(inputFP), caps, &clipPath, isCoverageCount, mustCheckBounds);
}

void GrCoverageCountingPathRenderer::preFlush(
        GrOnFlushResourceProvider* onFlushRP, SkSpan<const uint32_t> taskIDs) {
    using DoCopiesToA8Coverage = GrCCDrawPathsOp::DoCopiesToA8Coverage;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPathCache) {
        fPathCache->doPreFlushProcessing();
    }

    if (fPendingPaths.empty()) {
        return; // Nothing to draw.
    }

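    // Set up the size limits for this flush's atlases: one spec for paths copied from the cache
    // to A8 coverage, and one for freshly rendered paths.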
    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = std::min(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = std::min(512, maxPreferredRTSize);

    // Move the per-opsTask paths that are about to be flushed from fPendingPaths to
    // fFlushingPaths, and count them up so we can preallocate buffers.
    fFlushingPaths.reserve_back(taskIDs.count());
    for (uint32_t taskID : taskIDs) {
        auto iter = fPendingPaths.find(taskID);
        if (fPendingPaths.end() == iter) {
            continue; // No paths on this opsTask.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    if (specs.isEmpty()) {
        return; // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    auto doCopies = DoCopiesToA8Coverage(numCopies > kDoCopiesThreshold ||
                                         specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToA8Coverage::kNo == doCopies) {
        specs.cancelCopies();
    }

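    // Create the per-flush resources. If the underlying buffers failed to map, an allocation
    // failed and there is nothing we can draw this flush.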
    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, fCoverageType, specs);
    if (!resources->isMapped()) {
        return; // Some allocation failed.
    }

    // Layout the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(fPathCache.get(), onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    if (fPathCache) {
        // Purge invalidated textures from previous atlases *before* calling finalize(). That way,
        // the underlying texture objects can be freed up and reused for the next atlases.
        fPathCache->purgeInvalidatedAtlasTextures(onFlushRP);
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP)) {
        return;
    }

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken,
                                               SkSpan<const uint32_t> /* taskIDs */) {
    SkASSERT(fFlushing);

    if (!fFlushingPaths.empty()) {
        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

void GrCoverageCountingPathRenderer::purgeCacheEntriesOlderThan(
        GrProxyProvider* proxyProvider, const GrStdSteadyClock::time_point& purgeTime) {
    if (fPathCache) {
        fPathCache->purgeEntriesOlderThan(proxyProvider, purgeTime);
    }
}

void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

float GrCoverageCountingPathRenderer::GetStrokeDevWidth(const SkMatrix& m,
                                                        const SkStrokeRec& stroke,
                                                        float* inflationRadius) {
    float strokeDevWidth;
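    // Hairlines are always one pixel wide in device space, regardless of the view matrix.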
    if (stroke.isHairlineStyle()) {
        strokeDevWidth = 1;
    } else {
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle());
        SkASSERT(m.isSimilarity()); // Otherwise matrixScaleFactor = m.getMaxScale().
        float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
        strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
    }
    if (inflationRadius) {
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than 1px
        // wide, we may inflate it to 1px and instead reduce the opacity.
        *inflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), std::max(strokeDevWidth, 1.f));
    }
    return strokeDevWidth;
}