/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCoverageCountingPathRenderer.h"

#include "GrCaps.h"
#include "GrClip.h"
#include "GrProxyProvider.h"
#include "SkMakeUnique.h"
#include "SkPathOps.h"
#include "ccpr/GrCCClipProcessor.h"
#include "ccpr/GrCCDrawPathsOp.h"
#include "ccpr/GrCCPathCache.h"

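// Coverage counting path rendering (CCPR) fills a path in two passes: coverage counts are first
// accumulated into an offscreen atlas, then the path is drawn as a simple bounding shape that
// samples its coverage from that atlas. This is only a high-level summary; GrCCDrawPathsOp and
// GrCCPerFlushResources hold the authoritative details.
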
using PathInstance = GrCCPathProcessor::Instance;

GrCCPerOpListPaths::~GrCCPerOpListPaths() {
    // Ensure there are no surviving DrawPathsOps with a dangling pointer into this class.
    if (!fDrawOps.isEmpty()) {
        SK_ABORT("GrCCDrawPathsOp(s) not deleted during flush");
    }
    // Clip lazy proxies also reference this class from their callbacks, but those callbacks are
    // only invoked at flush time while we are still alive. (Unlike DrawPathsOps, which unregister
    // themselves upon destruction.) So it shouldn't matter if any clip proxies are still around.
}

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    return shaderCaps.integerSupport() && shaderCaps.flatInterpolationSupport() &&
           caps.instanceAttribSupport() && GrCaps::kNone_MapFlags != caps.mapBufferFlags() &&
           caps.isConfigTexturable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_half_GrPixelConfig) &&
           caps.isConfigTexturable(kAlpha_8_GrPixelConfig) &&
           caps.isConfigRenderable(kAlpha_8_GrPixelConfig) &&
           !caps.blacklistCoverageCounting();
}

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching) {
    return sk_sp<GrCoverageCountingPathRenderer>(
            IsSupported(caps) ? new GrCoverageCountingPathRenderer(allowCaching) : nullptr);
}

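// When caching is allowed, the constructor creates a GrCCPathCache so paths rendered in one flush
// can later be copied into longer-lived cached atlases (see the copy accounting in preFlush()).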
GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(AllowCaching allowCaching) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>();
    }
}

GrCoverageCountingPathRenderer::~GrCoverageCountingPathRenderer() {
    // Ensure callers are actually flushing paths they record, not causing us to leak memory.
    SkASSERT(fPendingPaths.empty());
    SkASSERT(!fFlushing);
}

GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

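// The heuristics below choose between claiming a path outright (kYes), offering to draw it only
// if no better-suited renderer wants it (kAsBackup), and declining entirely (kNo).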
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrShape& shape = *args.fShape;
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    const SkStrokeRec& stroke = shape.style().strokeRec();
    switch (stroke.getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this before
                // doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the SW
                // renderer have this one: It will probably be faster and a bitmap will require less
                // total memory on the GPU than CCPR instance buffers would for the raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we commit
                // to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke lines
                // themselves. This limitation doesn't affect hairlines since their stroke lines are
                // defined relative to device space.
                return CanDrawPath::kNo;
            }
            // fallthru
        case SkStrokeRec::kHairline_Style: {
            float inflationRadius;
            GetStrokeDevWidth(*args.fViewMatrix, stroke, &inflationRadius);
            if (!(inflationRadius <= kMaxBoundsInflationFromStroke)) {
                // Let extremely wide strokes be converted to fill paths and drawn by the CCPR
                // filler instead. (Cast the logic negatively in order to also catch r=NaN.)
                return CanDrawPath::kNo;
            }
            SkASSERT(!SkScalarIsNaN(inflationRadius));
            if (SkPathPriv::ConicWeightCnt(path)) {
                // The stroker does not support conics yet.
                return CanDrawPath::kNo;
            }
            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
    return CanDrawPath::kNo;
}

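// onDrawPath() does not draw anything immediately. It wraps the path in a GrCCDrawPathsOp and
// registers the op with the per-opList bookkeeping so its atlas space can be set up at flush time.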
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    auto op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                    std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

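// Note: addDrawOp() is expected to return a valid opList ID only when the op is actually recorded
// (it may be dropped, e.g. if it is fully clipped away); only then do we add the op to our
// pending-paths map.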
void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> opHolder,
                                              const DrawPathArgs& args) {
    if (GrCCDrawPathsOp* op = opHolder.get()) {
        GrRenderTargetContext* rtc = args.fRenderTargetContext;
        if (uint32_t opListID = rtc->addDrawOp(*args.fClip, std::move(opHolder))) {
            op->wasRecorded(this->lookupPendingPaths(opListID));
        }
    }
}

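// Clip paths are deduplicated per opList by SkPath generation ID, so repeated clips that use the
// same path share a single atlas entry. The returned GrCCClipProcessor then reads that path's
// coverage out of the atlas when the clip is applied.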
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
        int rtHeight, const GrCaps& caps) {
    using MustCheckBounds = GrCCClipProcessor::MustCheckBounds;

    SkASSERT(!fFlushing);

    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[deviceSpacePath.getGenerationID()];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, rtWidth, rtHeight, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, rtWidth, rtHeight, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    bool mustCheckBounds = !clipPath.pathDevIBounds().contains(accessRect);
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, MustCheckBounds(mustCheckBounds),
                                                 deviceSpacePath.getFillType());
}

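// preFlush() is where the deferred work happens: it gathers every pending path for the opLists
// being flushed, sizes and lays out the atlas(es), and renders them, so the GrCCDrawPathsOps can
// simply sample the results when they execute.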
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    using DoCopiesToCache = GrCCDrawPathsOp::DoCopiesToCache;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    // Dig up the stashed atlas from the previous flush (if any) so we can attempt to copy any
    // reusable paths out of it and into the resource cache. We also need to clear its unique key.
    sk_sp<GrTextureProxy> stashedAtlasProxy;
    if (fStashedAtlasKey.isValid()) {
        stashedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(fStashedAtlasKey,
                                                                    GrCCAtlas::kTextureOrigin);
        if (stashedAtlasProxy) {
            // Instantiate the proxy so we can clear the underlying texture's unique key.
            onFlushRP->instatiateProxy(stashedAtlasProxy.get());
            onFlushRP->removeUniqueKeyFromProxy(fStashedAtlasKey, stashedAtlasProxy.get());
        } else {
            fStashedAtlasKey.reset();  // Indicate there is no stashed atlas to copy from.
        }
    }

    if (fPendingPaths.empty()) {
        fStashedAtlasKey.reset();
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, fStashedAtlasKey, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }
    fStashedAtlasKey.reset();

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    DoCopiesToCache doCopies = DoCopiesToCache(numCopies > 100 ||
                                               specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToCache::kNo == doCopies) {
        specs.convertCopiesToRenders();
        SkASSERT(!specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx]);
        SkASSERT(!specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx]);
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, std::move(stashedAtlasProxy), out)) {
        return;
    }
    // Verify the stashed atlas got released so its texture could be recycled.
    SkASSERT(!stashedAtlasProxy);

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

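// postFlush() notes the key of any atlas that was stashed for reuse next flush, then drops the
// per-flush resources so their GPU memory can be recycled.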
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);
    SkASSERT(!fStashedAtlasKey.isValid());  // Should have been cleared in preFlush().

    if (!fFlushingPaths.empty()) {
        // Note the stashed atlas's key for next flush, if any.
        auto resources = fFlushingPaths.front()->fFlushResources.get();
        if (resources && resources->hasStashedAtlas()) {
            fStashedAtlasKey = resources->stashedAtlasKey();
        }

        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

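// Maps a stroke into device space (assuming a similarity transform for non-hairlines) and,
// optionally, reports how far the stroke can inflate the path's bounds.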
float GrCoverageCountingPathRenderer::GetStrokeDevWidth(const SkMatrix& m,
                                                        const SkStrokeRec& stroke,
                                                        float* inflationRadius) {
    float strokeDevWidth;
    if (stroke.isHairlineStyle()) {
        strokeDevWidth = 1;
    } else {
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle());
        SkASSERT(m.isSimilarity());  // Otherwise matrixScaleFactor = m.getMaxScale().
        float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
        strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
    }
    if (inflationRadius) {
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than 1px
        // wide, we may inflate it to 1px and instead reduce the opacity.
        *inflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), SkTMax(strokeDevWidth, 1.f));
    }
    return strokeDevWidth;
}