/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrProxyProvider.h"
#include "SkMakeUnique.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"
#include "ccpr/GrCCDrawPathsOp.h"
#include "ccpr/GrCCPathCache.h"

using PathInstance = GrCCPathProcessor::Instance;

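// Coverage counting requires instanced draws, shader integer support, true 32-bit floats,
// mappable buffers, and texturable/renderable alpha-half and alpha-8 configs, and must not be
// blacklisted on the current GPU.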
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return caps.instanceAttribSupport() && shaderCaps.integerSupport() &&
           shaderCaps.floatIs32Bits() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigTexturable(kAlpha_8_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_8_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching, uint32_t contextUniqueID) {
    return sk_sp<GrCoverageCountingPathRenderer>((IsSupported(caps))
            ? new GrCoverageCountingPathRenderer(allowCaching, contextUniqueID)
            : nullptr);
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(AllowCaching allowCaching,
                                                               uint32_t contextUniqueID) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>(contextUniqueID);
    }
}

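// Returns the GrCCPerOpListPaths object for the given opList, creating and registering one on
// first use.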
GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

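// Triages whether CCPR can or should draw a given shape. Coverage counting only handles coverage
// AA, non-perspective view matrices, no path effects, and non-inverse fills; fills, strokes, and
// hairlines are then screened individually below.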
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrShape& shape = *args.fShape;
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    const SkStrokeRec& stroke = shape.style().strokeRec();
    switch (stroke.getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this before
                // doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the SW
                // renderer have this one: It will probably be faster and a bitmap will require less
                // total memory on the GPU than CCPR instance buffers would for the raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we commit
                // to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke lines
                // themselves. This limitation doesn't affect hairlines since their stroke lines
                // are defined relative to device space.
                return CanDrawPath::kNo;
            }
            // fallthru
        case SkStrokeRec::kHairline_Style: {
            float inflationRadius;
            GetStrokeDevWidth(*args.fViewMatrix, stroke, &inflationRadius);
            if (!(inflationRadius <= kMaxBoundsInflationFromStroke)) {
                // Let extremely wide strokes be converted to fill paths and drawn by the CCPR
                // filler instead. (Cast the logic negatively in order to also catch r=NaN.)
                return CanDrawPath::kNo;
            }
            SkASSERT(!SkScalarIsNaN(inflationRadius));
            if (SkPathPriv::ConicWeightCnt(path)) {
                // The stroker does not support conics yet.
                return CanDrawPath::kNo;
            }
            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
    return CanDrawPath::kNo;
}

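// Wraps the shape in a GrCCDrawPathsOp, bounded by the clip's conservative device-space bounds,
// and hands it off to the render target context via recordOp().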
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    auto op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                    std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

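// GrCCDrawPathsOp::Make() may return null, so only forward a valid op. The addDrawOp callback
// links the op to the GrCCPerOpListPaths set for whichever opList the op ends up on.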
void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> op,
                                              const DrawPathArgs& args) {
    if (op) {
        auto addToOwningPerOpListPaths = [this](GrOp* op, uint32_t opListID) {
            op->cast<GrCCDrawPathsOp>()->addToOwningPerOpListPaths(
                    sk_ref_sp(this->lookupPendingPaths(opListID)));
        };
        args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op), addToOwningPerOpListPaths);
    }
}

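// Returns a GrCCClipProcessor that clips against deviceSpacePath. Clip paths are keyed by the
// path's generation ID within each opList, so repeated clips against the same path share a single
// GrCCClipPath entry and simply accumulate access rects. Oversized paths are cropped first so
// analytic AA doesn't run out of fp32 precision.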
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
        int rtHeight, const GrCaps& caps) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, rtWidth, rtHeight, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

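// On-flush callback: moves the affected per-opList path sets into fFlushingPaths, tallies their
// atlas and buffer requirements into GrCCPerFlushResourceSpecs, then lays out and renders the
// coverage-count atlas(es) that this flush's GrCCDrawPathsOps and clip FPs will read from.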
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    using DoCopiesToA8Coverage = GrCCDrawPathsOp::DoCopiesToA8Coverage;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPathCache) {
        fPathCache->doPreFlushProcessing();
    }

    if (fPendingPaths.empty()) {
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    auto doCopies = DoCopiesToA8Coverage(numCopies > 100 ||
                                         specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToA8Coverage::kNo == doCopies) {
        specs.cancelCopies();
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(fPathCache.get(), onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    if (fPathCache) {
        // Purge invalidated textures from previous atlases *before* calling finalize(). That way,
        // the underlying texture objects can be freed up and reused for the next atlases.
        fPathCache->purgeInvalidatedAtlasTextures(onFlushRP);
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, out)) {
        return;
    }

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

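// Post-flush callback: releases each GrCCPerOpListPaths' hold on the per-flush resources and
// erases the flushing path sets, now that the Ops and FPs are done with them.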
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);

    if (!fFlushingPaths.empty()) {
        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

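// Evicts path cache entries older than 'purgeTime'. No-op when caching is disabled.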
void GrCoverageCountingPathRenderer::purgeCacheEntriesOlderThan(
        GrProxyProvider* proxyProvider, const GrStdSteadyClock::time_point& purgeTime) {
    if (fPathCache) {
        fPathCache->purgeEntriesOlderThan(proxyProvider, purgeTime);
    }
}

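// Intersects 'path' with 'cropbox' using PathOps and writes the (volatile) result to 'out'. If
// the op fails (e.g. on NaN or infinite coordinates), 'out' is left empty.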
void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

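// Returns the stroke width in device space, assuming a similarity view matrix (hairlines are
// always 1px in device space). If requested, also reports how far the stroke inflates the path's
// bounds, using a minimum device-space width of 1px. For example, under a uniform 2x scale, a
// stroke width of 3 maps to 6 in device space.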
float GrCoverageCountingPathRenderer::GetStrokeDevWidth(const SkMatrix& m,
                                                        const SkStrokeRec& stroke,
                                                        float* inflationRadius) {
    float strokeDevWidth;
    if (stroke.isHairlineStyle()) {
        strokeDevWidth = 1;
    } else {
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle());
        SkASSERT(m.isSimilarity());  // Otherwise matrixScaleFactor = m.getMaxScale().
        float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
        strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
    }
    if (inflationRadius) {
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than 1px
        // wide, we may inflate it to 1px and instead reduce the opacity.
        *inflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), SkTMax(strokeDevWidth, 1.f));
    }
    return strokeDevWidth;
}