/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"
#include "src/gpu/ccpr/GrCCDrawPathsOp.h"
#include "src/gpu/ccpr/GrCCPathCache.h"

using PathInstance = GrCCPathProcessor::Instance;

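// Determines whether the GPU caps allow CCPR at all and, if so, which coverage mode to use:
// fp16 coverage counts when coverage counting is allowed and a renderable alpha-half format
// exists, otherwise the A8 multisample mode when MSAA, sample locations, and stencil sample
// variables are all supported.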
bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps, CoverageType* coverageType) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverBlacklistCCPR() || !shaderCaps.integerSupport() ||
        !caps.instanceAttribSupport() || !shaderCaps.floatIs32Bits() ||
        GrCaps::kNone_MapFlags == caps.mapBufferFlags() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

    GrBackendFormat defaultAHalfFormat = caps.getDefaultBackendFormat(GrColorType::kAlpha_F16,
                                                                      GrRenderable::kYes);
    if (caps.allowCoverageCounting() &&
        defaultAHalfFormat.isValid()) { // This checks both texturable and renderable
        if (coverageType) {
            *coverageType = CoverageType::kFP16_CoverageCount;
        }
        return true;
    }

    if (!caps.driverBlacklistMSAACCPR() &&
        caps.internalMultisampleCount(kAlpha_8_GrPixelConfig) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleVariablesStencilSupport()) {
        if (coverageType) {
            *coverageType = CoverageType::kA8_Multisample;
        }
        return true;
    }

    return false;
}

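// Factory entry point: returns a renderer configured with the detected coverage type, or null
// when CCPR is unsupported on this GPU.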
sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching, uint32_t contextUniqueID) {
    CoverageType coverageType;
    if (IsSupported(caps, &coverageType)) {
        return sk_sp<GrCoverageCountingPathRenderer>(new GrCoverageCountingPathRenderer(
                coverageType, allowCaching, contextUniqueID));
    }
    return nullptr;
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(
        CoverageType coverageType, AllowCaching allowCaching, uint32_t contextUniqueID)
        : fCoverageType(coverageType) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>(contextUniqueID);
    }
}

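// Returns the GrCCPerOpListPaths record for the given opList, creating an empty one on first
// lookup. Pending paths accumulate here until preFlush() moves them to fFlushingPaths.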
GrCCPerOpListPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opListID) {
    auto it = fPendingPaths.find(opListID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpListPaths> paths = sk_make_sp<GrCCPerOpListPaths>();
        it = fPendingPaths.insert(std::make_pair(opListID, std::move(paths))).first;
    }
    return it->second.get();
}

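// Decides whether CCPR should draw a given shape. Only analytic-AA fills, strokes, and hairlines
// without path effects, perspective, or inverse fills qualify; oversized or very complex paths
// are declined or offered only as a backup so simpler renderers get the first chance.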
GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrShape& shape = *args.fShape;
    // We use "kCoverage", or analytic AA, no matter what the coverage type of our atlas: Even if
    // the atlas is multisampled, that resolves into analytic coverage before we draw the path to
    // the main canvas.
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    const SkStrokeRec& stroke = shape.style().strokeRec();
    switch (stroke.getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this before
                // doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the SW
                // renderer have this one: It will probably be faster and a bitmap will require less
                // total memory on the GPU than CCPR instance buffers would for the raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we commit
                // to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke lines
                // themselves. This limitation doesn't affect hairlines since their stroke lines are
                // defined relative to device space.
                return CanDrawPath::kNo;
            }
            // fallthru
        case SkStrokeRec::kHairline_Style: {
            if (CoverageType::kFP16_CoverageCount != fCoverageType) {
                // Stroking is not yet supported in MSAA atlas mode.
                return CanDrawPath::kNo;
            }
            float inflationRadius;
            GetStrokeDevWidth(*args.fViewMatrix, stroke, &inflationRadius);
            if (!(inflationRadius <= kMaxBoundsInflationFromStroke)) {
                // Let extremely wide strokes be converted to fill paths and drawn by the CCPR
                // filler instead. (Cast the logic negatively in order to also catch r=NaN.)
                return CanDrawPath::kNo;
            }
            SkASSERT(!SkScalarIsNaN(inflationRadius));
            if (SkPathPriv::ConicWeightCnt(path)) {
                // The stroker does not support conics yet.
                return CanDrawPath::kNo;
            }
            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
    return CanDrawPath::kNo;
}

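// Records the draw as a deferred GrCCDrawPathsOp with the clip's conservative device bounds; the
// actual atlas rendering is deferred to flush time.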
bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    auto op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                    std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

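// Adds the op to the target's opList. The callback registers the op with the GrCCPerOpListPaths
// for whichever opList it lands on, so preFlush() can find it later.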
void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> op,
                                              const DrawPathArgs& args) {
    if (op) {
        auto addToOwningPerOpListPaths = [this](GrOp* op, uint32_t opListID) {
            op->cast<GrCCDrawPathsOp>()->addToOwningPerOpListPaths(
                    sk_ref_sp(this->lookupPendingPaths(opListID)));
        };
        args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op), addToOwningPerOpListPaths);
    }
}

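// Builds a fragment processor that clips against a path rendered into the atlas. Clip paths are
// keyed by the path's generation ID (plus fill rule in MSAA mode), and oversized paths are
// cropped to the max render target size so analytic AA stays within fp32 precision.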
std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opListID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
        const GrCaps& caps) {
    SkASSERT(!fFlushing);

    uint32_t key = deviceSpacePath.getGenerationID();
    if (CoverageType::kA8_Multisample == fCoverageType) {
        // We only need to consider fill rule in MSAA mode. In coverage count mode Even/Odd and
        // Nonzero both reference the same coverage count mask.
        key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
    }
    GrCCClipPath& clipPath =
            this->lookupPendingPaths(opListID)->fClipPaths[key];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, fCoverageType, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, fCoverageType, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    auto isCoverageCount = GrCCClipProcessor::IsCoverageCount(
            CoverageType::kFP16_CoverageCount == fCoverageType);
    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath.pathDevIBounds().contains(accessRect));
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, isCoverageCount, mustCheckBounds);
}

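// Flush-time entry point. Moves the pending per-opList paths into fFlushingPaths, tallies
// per-flush resource specs, decides whether reusable cached paths are worth copying into A8
// coverage atlases, then lays out, renders, and finalizes the atlas(es).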
void GrCoverageCountingPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP,
                                              const uint32_t* opListIDs, int numOpListIDs,
                                              SkTArray<sk_sp<GrRenderTargetContext>>* out) {
    using DoCopiesToA8Coverage = GrCCDrawPathsOp::DoCopiesToA8Coverage;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPathCache) {
        fPathCache->doPreFlushProcessing();
    }

    if (fPendingPaths.empty()) {
        return;  // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opList paths that are about to be flushed from fPendingPaths to fFlushingPaths,
    // and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpListIDs);
    for (int i = 0; i < numOpListIDs; ++i) {
        auto iter = fPendingPaths.find(opListIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue;  // No paths on this opList.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    if (specs.isEmpty()) {
        return;  // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    auto doCopies = DoCopiesToA8Coverage(numCopies > 100 ||
                                         specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToA8Coverage::kNo == doCopies) {
        specs.cancelCopies();
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, fCoverageType, specs);
    if (!resources->isMapped()) {
        return;  // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(fPathCache.get(), onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    if (fPathCache) {
        // Purge invalidated textures from previous atlases *before* calling finalize(). That way,
        // the underlying texture objects can be freed up and reused for the next atlases.
        fPathCache->purgeInvalidatedAtlasTextures(onFlushRP);
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, out)) {
        return;
    }

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}

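// Called once the flush completes: drops the per-flush resources and the flushed path lists now
// that no Ops or FPs reference them, then clears the debug-only fFlushing flag.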
void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opListIDs,
                                               int numOpListIDs) {
    SkASSERT(fFlushing);

    if (!fFlushingPaths.empty()) {
        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

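// Forwards to the path cache to purge entries older than 'purgeTime'; a no-op when caching is
// disabled.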
void GrCoverageCountingPathRenderer::purgeCacheEntriesOlderThan(
        GrProxyProvider* proxyProvider, const GrStdSteadyClock::time_point& purgeTime) {
    if (fPathCache) {
        fPathCache->purgeEntriesOlderThan(proxyProvider, purgeTime);
    }
}

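// Intersects 'path' with 'cropbox' via SkPathOps and marks the result volatile; if the op fails
// (e.g. on NaN or infinite coordinates) the output is reset to an empty path instead.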
void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}

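// Converts a stroke's width to device space (hairlines map to 1px) and optionally reports how far
// the stroke can inflate the path bounds, assuming a minimum 1px device-space stroke width.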
float GrCoverageCountingPathRenderer::GetStrokeDevWidth(const SkMatrix& m,
                                                        const SkStrokeRec& stroke,
                                                        float* inflationRadius) {
    float strokeDevWidth;
    if (stroke.isHairlineStyle()) {
        strokeDevWidth = 1;
    } else {
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle());
        SkASSERT(m.isSimilarity());  // Otherwise matrixScaleFactor = m.getMaxScale().
        float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
        strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
    }
    if (inflationRadius) {
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than 1px
        // wide, we may inflate it to 1px and instead reduce the opacity.
        *inflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), SkTMax(strokeDevWidth, 1.f));
    }
    return strokeDevWidth;
}