/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ccpr/GrCoverageCountingPathRenderer.h"

#include "include/pathops/SkPathOps.h"
#include "src/core/SkMakeUnique.h"
#include "src/gpu/GrCaps.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/ccpr/GrCCClipProcessor.h"
#include "src/gpu/ccpr/GrCCDrawPathsOp.h"
#include "src/gpu/ccpr/GrCCPathCache.h"

using PathInstance = GrCCPathProcessor::Instance;

bool GrCoverageCountingPathRenderer::IsSupported(const GrCaps& caps, CoverageType* coverageType) {
    const GrShaderCaps& shaderCaps = *caps.shaderCaps();
    GrBackendFormat defaultA8Format = caps.getDefaultBackendFormat(GrColorType::kAlpha_8,
                                                                   GrRenderable::kYes);
    if (caps.driverBlacklistCCPR() || !shaderCaps.integerSupport() ||
        !caps.instanceAttribSupport() || !shaderCaps.floatIs32Bits() ||
        GrCaps::kNone_MapFlags == caps.mapBufferFlags() ||
        !defaultA8Format.isValid() || // This checks both texturable and renderable.
        !caps.halfFloatVertexAttributeSupport()) {
        return false;
    }

    GrBackendFormat defaultAHalfFormat = caps.getDefaultBackendFormat(GrColorType::kAlpha_F16,
                                                                      GrRenderable::kYes);
    if (caps.allowCoverageCounting() &&
        defaultAHalfFormat.isValid()) { // This checks both texturable and renderable.
        if (coverageType) {
            *coverageType = CoverageType::kFP16_CoverageCount;
        }
        return true;
    }

    if (!caps.driverBlacklistMSAACCPR() &&
        caps.internalMultisampleCount(defaultA8Format) > 1 &&
        caps.sampleLocationsSupport() &&
        shaderCaps.sampleVariablesStencilSupport()) {
        if (coverageType) {
            *coverageType = CoverageType::kA8_Multisample;
        }
        return true;
    }

    return false;
}
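
// A minimal usage sketch (illustrative only; "caps" is assumed to be the GrCaps
// of the context being queried):
//
//     GrCoverageCountingPathRenderer::CoverageType type;
//     if (GrCoverageCountingPathRenderer::IsSupported(caps, &type)) {
//         // "type" is CoverageType::kFP16_CoverageCount (analytic coverage
//         // counts in an alpha-f16 atlas) or CoverageType::kA8_Multisample
//         // (sample-mask/stencil coverage resolved into an alpha-8 atlas).
//     }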

sk_sp<GrCoverageCountingPathRenderer> GrCoverageCountingPathRenderer::CreateIfSupported(
        const GrCaps& caps, AllowCaching allowCaching, uint32_t contextUniqueID) {
    CoverageType coverageType;
    if (IsSupported(caps, &coverageType)) {
        return sk_sp<GrCoverageCountingPathRenderer>(new GrCoverageCountingPathRenderer(
                coverageType, allowCaching, contextUniqueID));
    }
    return nullptr;
}

GrCoverageCountingPathRenderer::GrCoverageCountingPathRenderer(
        CoverageType coverageType, AllowCaching allowCaching, uint32_t contextUniqueID)
        : fCoverageType(coverageType) {
    if (AllowCaching::kYes == allowCaching) {
        fPathCache = skstd::make_unique<GrCCPathCache>(contextUniqueID);
    }
}

GrCCPerOpsTaskPaths* GrCoverageCountingPathRenderer::lookupPendingPaths(uint32_t opsTaskID) {
    auto it = fPendingPaths.find(opsTaskID);
    if (fPendingPaths.end() == it) {
        sk_sp<GrCCPerOpsTaskPaths> paths = sk_make_sp<GrCCPerOpsTaskPaths>();
        it = fPendingPaths.insert(std::make_pair(opsTaskID, std::move(paths))).first;
    }
    return it->second.get();
}
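
// Because lookupPendingPaths() creates the per-opsTask record on demand,
// callers can chain through it unconditionally; makeClipProcessor() below does
// exactly that:
//
//     this->lookupPendingPaths(opsTaskID)->fClipPaths[key];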

GrPathRenderer::CanDrawPath GrCoverageCountingPathRenderer::onCanDrawPath(
        const CanDrawPathArgs& args) const {
    const GrShape& shape = *args.fShape;
    // We use "kCoverage", or analytic AA, no matter what the coverage type of our atlas: Even if
    // the atlas is multisampled, that resolves into analytic coverage before we draw the path to
    // the main canvas.
    if (GrAAType::kCoverage != args.fAAType || shape.style().hasPathEffect() ||
        args.fViewMatrix->hasPerspective() || shape.inverseFilled()) {
        return CanDrawPath::kNo;
    }

    SkPath path;
    shape.asPath(&path);

    const SkStrokeRec& stroke = shape.style().strokeRec();
    switch (stroke.getStyle()) {
        case SkStrokeRec::kFill_Style: {
            SkRect devBounds;
            args.fViewMatrix->mapRect(&devBounds, path.getBounds());

            SkIRect clippedIBounds;
            devBounds.roundOut(&clippedIBounds);
            if (!clippedIBounds.intersect(*args.fClipConservativeBounds)) {
                // The path is completely clipped away. Our code will eventually notice this
                // before doing any real work.
                return CanDrawPath::kYes;
            }

            int64_t numPixels = sk_64_mul(clippedIBounds.height(), clippedIBounds.width());
            if (path.countVerbs() > 1000 && path.countPoints() > numPixels) {
                // This is a complicated path that has more vertices than pixels! Let's let the
                // SW renderer have this one: It will probably be faster and a bitmap will
                // require less total memory on the GPU than CCPR instance buffers would for the
                // raw path data.
                return CanDrawPath::kNo;
            }

            if (numPixels > 256 * 256) {
                // Large paths can blow up the atlas fast. And they are not ideal for a two-pass
                // rendering algorithm. Give the simpler direct renderers a chance before we
                // commit to drawing it.
                return CanDrawPath::kAsBackup;
            }

            if (args.fShape->hasUnstyledKey() && path.countVerbs() > 50) {
                // Complex paths do better cached in an SDF, if the renderer will accept them.
                return CanDrawPath::kAsBackup;
            }

            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStroke_Style:
            if (!args.fViewMatrix->isSimilarity()) {
                // The stroker currently only supports rigid-body transforms for the stroke
                // lines themselves. This limitation doesn't affect hairlines since their stroke
                // lines are defined relative to device space.
                return CanDrawPath::kNo;
            }
            // fallthru
        case SkStrokeRec::kHairline_Style: {
            if (CoverageType::kFP16_CoverageCount != fCoverageType) {
                // Stroking is not yet supported in MSAA atlas mode.
                return CanDrawPath::kNo;
            }
            float inflationRadius;
            GetStrokeDevWidth(*args.fViewMatrix, stroke, &inflationRadius);
            if (!(inflationRadius <= kMaxBoundsInflationFromStroke)) {
                // Let extremely wide strokes be converted to fill paths and drawn by the CCPR
                // filler instead. (Cast the logic negatively in order to also catch r=NaN.)
                return CanDrawPath::kNo;
            }
            SkASSERT(!SkScalarIsNaN(inflationRadius));
            if (SkPathPriv::ConicWeightCnt(path)) {
                // The stroker does not support conics yet.
                return CanDrawPath::kNo;
            }
            return CanDrawPath::kYes;
        }

        case SkStrokeRec::kStrokeAndFill_Style:
            return CanDrawPath::kNo;
    }

    SK_ABORT("Invalid stroke style.");
}
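
// Worked example of the fill-path heuristics above (thresholds from this file;
// the specific path is hypothetical): a path with 1200 verbs whose clipped
// device bounds are 30x30 has numPixels = 900, so if it also has more than 900
// points it is handed to the SW renderer (kNo). A simple path covering
// 512x512 = 262144 pixels exceeds the 256*256 = 65536 pixel budget and only
// offers itself as a backup (kAsBackup).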

bool GrCoverageCountingPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkASSERT(!fFlushing);

    SkIRect clipIBounds;
    GrRenderTargetContext* rtc = args.fRenderTargetContext;
    args.fClip->getConservativeBounds(rtc->width(), rtc->height(), &clipIBounds, nullptr);

    auto op = GrCCDrawPathsOp::Make(args.fContext, clipIBounds, *args.fViewMatrix, *args.fShape,
                                    std::move(args.fPaint));
    this->recordOp(std::move(op), args);
    return true;
}

void GrCoverageCountingPathRenderer::recordOp(std::unique_ptr<GrCCDrawPathsOp> op,
                                              const DrawPathArgs& args) {
    if (op) {
        auto addToOwningPerOpsTaskPaths = [this](GrOp* op, uint32_t opsTaskID) {
            op->cast<GrCCDrawPathsOp>()->addToOwningPerOpsTaskPaths(
                    sk_ref_sp(this->lookupPendingPaths(opsTaskID)));
        };
        args.fRenderTargetContext->addDrawOp(*args.fClip, std::move(op),
                                             addToOwningPerOpsTaskPaths);
    }
}
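
// The lambda above runs once the op has been assigned to an ops task, at which
// point the task's ID is known; it links the op to the matching
// GrCCPerOpsTaskPaths record so preFlush() can later gather every path that
// flushes together.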

std::unique_ptr<GrFragmentProcessor> GrCoverageCountingPathRenderer::makeClipProcessor(
        uint32_t opsTaskID, const SkPath& deviceSpacePath, const SkIRect& accessRect,
        const GrCaps& caps) {
    SkASSERT(!fFlushing);

    uint32_t key = deviceSpacePath.getGenerationID();
    if (CoverageType::kA8_Multisample == fCoverageType) {
        // We only need to consider fill rule in MSAA mode. In coverage count mode Even/Odd and
        // Nonzero both reference the same coverage count mask.
        key = (key << 1) | (uint32_t)GrFillRuleForSkPath(deviceSpacePath);
    }
    GrCCClipPath& clipPath = this->lookupPendingPaths(opsTaskID)->fClipPaths[key];
    if (!clipPath.isInitialized()) {
        // This ClipPath was just created during lookup. Initialize it.
        const SkRect& pathDevBounds = deviceSpacePath.getBounds();
        if (SkTMax(pathDevBounds.height(), pathDevBounds.width()) > kPathCropThreshold) {
            // The path is too large. Crop it or analytic AA can run out of fp32 precision.
            SkPath croppedPath;
            int maxRTSize = caps.maxRenderTargetSize();
            CropPath(deviceSpacePath, SkIRect::MakeWH(maxRTSize, maxRTSize), &croppedPath);
            clipPath.init(croppedPath, accessRect, fCoverageType, caps);
        } else {
            clipPath.init(deviceSpacePath, accessRect, fCoverageType, caps);
        }
    } else {
        clipPath.addAccess(accessRect);
    }

    auto isCoverageCount = GrCCClipProcessor::IsCoverageCount(
            CoverageType::kFP16_CoverageCount == fCoverageType);
    auto mustCheckBounds = GrCCClipProcessor::MustCheckBounds(
            !clipPath.pathDevIBounds().contains(accessRect));
    return skstd::make_unique<GrCCClipProcessor>(&clipPath, isCoverageCount, mustCheckBounds);
}
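
// Key-packing sketch for the MSAA branch above (the generation ID is
// hypothetical, and which fill rule maps to which bit is up to
// GrFillRuleForSkPath): a path with generation ID 0x2a keys as
// (0x2a << 1) | 0 = 0x54 under one fill rule and (0x2a << 1) | 1 = 0x55 under
// the other, so the two fill rules get distinct atlas entries. In coverage
// count mode the unshifted generation ID alone is the key.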

void GrCoverageCountingPathRenderer::preFlush(
        GrOnFlushResourceProvider* onFlushRP,
        const uint32_t* opsTaskIDs,
        int numOpsTaskIDs,
        SkTArray<std::unique_ptr<GrRenderTargetContext>>* out) {
    using DoCopiesToA8Coverage = GrCCDrawPathsOp::DoCopiesToA8Coverage;
    SkASSERT(!fFlushing);
    SkASSERT(fFlushingPaths.empty());
    SkDEBUGCODE(fFlushing = true);

    if (fPathCache) {
        fPathCache->doPreFlushProcessing();
    }

    if (fPendingPaths.empty()) {
        return; // Nothing to draw.
    }

    GrCCPerFlushResourceSpecs specs;
    int maxPreferredRTSize = onFlushRP->caps()->maxPreferredRenderTargetSize();
    specs.fCopyAtlasSpecs.fMaxPreferredTextureSize = SkTMin(2048, maxPreferredRTSize);
    SkASSERT(0 == specs.fCopyAtlasSpecs.fMinTextureSize);
    specs.fRenderedAtlasSpecs.fMaxPreferredTextureSize = maxPreferredRTSize;
    specs.fRenderedAtlasSpecs.fMinTextureSize = SkTMin(512, maxPreferredRTSize);

    // Move the per-opsTask paths that are about to be flushed from fPendingPaths to
    // fFlushingPaths, and count them up so we can preallocate buffers.
    fFlushingPaths.reserve(numOpsTaskIDs);
    for (int i = 0; i < numOpsTaskIDs; ++i) {
        auto iter = fPendingPaths.find(opsTaskIDs[i]);
        if (fPendingPaths.end() == iter) {
            continue; // No paths on this opsTask.
        }

        fFlushingPaths.push_back(std::move(iter->second));
        fPendingPaths.erase(iter);

        for (GrCCDrawPathsOp* op : fFlushingPaths.back()->fDrawOps) {
            op->accountForOwnPaths(fPathCache.get(), onFlushRP, &specs);
        }
        for (const auto& clipsIter : fFlushingPaths.back()->fClipPaths) {
            clipsIter.second.accountForOwnPath(&specs);
        }
    }

    if (specs.isEmpty()) {
        return; // Nothing to draw.
    }

    // Determine if there are enough reusable paths from last flush for it to be worth our time to
    // copy them to cached atlas(es).
    int numCopies = specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kFillIdx] +
                    specs.fNumCopiedPaths[GrCCPerFlushResourceSpecs::kStrokeIdx];
    auto doCopies = DoCopiesToA8Coverage(numCopies > 100 ||
                                         specs.fCopyAtlasSpecs.fApproxNumPixels > 256 * 256);
    if (numCopies && DoCopiesToA8Coverage::kNo == doCopies) {
        specs.cancelCopies();
    }

    auto resources = sk_make_sp<GrCCPerFlushResources>(onFlushRP, fCoverageType, specs);
    if (!resources->isMapped()) {
        return; // Some allocation failed.
    }

    // Lay out the atlas(es) and parse paths.
    for (const auto& flushingPaths : fFlushingPaths) {
        for (GrCCDrawPathsOp* op : flushingPaths->fDrawOps) {
            op->setupResources(fPathCache.get(), onFlushRP, resources.get(), doCopies);
        }
        for (auto& clipsIter : flushingPaths->fClipPaths) {
            clipsIter.second.renderPathInAtlas(resources.get(), onFlushRP);
        }
    }

    if (fPathCache) {
        // Purge invalidated textures from previous atlases *before* calling finalize(). That way,
        // the underlying texture objects can be freed up and reused for the next atlases.
        fPathCache->purgeInvalidatedAtlasTextures(onFlushRP);
    }

    // Allocate resources and then render the atlas(es).
    if (!resources->finalize(onFlushRP, out)) {
        return;
    }

    // Commit flushing paths to the resources once they are successfully completed.
    for (auto& flushingPaths : fFlushingPaths) {
        SkASSERT(!flushingPaths->fFlushResources);
        flushingPaths->fFlushResources = resources;
    }
}
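
// A note on the copy heuristic above: reusable paths are only transferred to
// the cached A8 atlas(es) when the flush has either more than 100 of them or
// more than 256*256 = 65536 pixels' worth; below both thresholds the pending
// copies are cancelled for this flush, on the reasoning stated above that the
// copy pass isn't worth the time for so few paths.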

void GrCoverageCountingPathRenderer::postFlush(GrDeferredUploadToken, const uint32_t* opsTaskIDs,
                                               int numOpsTaskIDs) {
    SkASSERT(fFlushing);

    if (!fFlushingPaths.empty()) {
        // In DDL mode these aren't guaranteed to be deleted so we must clear out the perFlush
        // resources manually.
        for (auto& flushingPaths : fFlushingPaths) {
            flushingPaths->fFlushResources = nullptr;
        }

        // We wait to erase these until after flush, once Ops and FPs are done accessing their
        // data.
        fFlushingPaths.reset();
    }

    SkDEBUGCODE(fFlushing = false);
}

void GrCoverageCountingPathRenderer::purgeCacheEntriesOlderThan(
        GrProxyProvider* proxyProvider, const GrStdSteadyClock::time_point& purgeTime) {
    if (fPathCache) {
        fPathCache->purgeEntriesOlderThan(proxyProvider, purgeTime);
    }
}

void GrCoverageCountingPathRenderer::CropPath(const SkPath& path, const SkIRect& cropbox,
                                              SkPath* out) {
    SkPath cropboxPath;
    cropboxPath.addRect(SkRect::Make(cropbox));
    if (!Op(cropboxPath, path, kIntersect_SkPathOp, out)) {
        // This can fail if the PathOps encounter NaN or infinities.
        out->reset();
    }
    out->setIsVolatile(true);
}
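
// Usage sketch (the 16384 max render target size is a hypothetical value;
// makeClipProcessor() above passes caps.maxRenderTargetSize()):
//
//     SkPath cropped;
//     GrCoverageCountingPathRenderer::CropPath(
//             deviceSpacePath, SkIRect::MakeWH(16384, 16384), &cropped);
//     // "cropped" is the intersection of the path with the crop box, or an
//     // empty path if the path ops failed on non-finite values.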

float GrCoverageCountingPathRenderer::GetStrokeDevWidth(const SkMatrix& m,
                                                        const SkStrokeRec& stroke,
                                                        float* inflationRadius) {
    float strokeDevWidth;
    if (stroke.isHairlineStyle()) {
        strokeDevWidth = 1;
    } else {
        SkASSERT(SkStrokeRec::kStroke_Style == stroke.getStyle());
        SkASSERT(m.isSimilarity()); // Otherwise matrixScaleFactor = m.getMaxScale().
        float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
        strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
    }
    if (inflationRadius) {
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than
        // 1px wide, we may inflate it to 1px and instead reduce the opacity.
        *inflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), SkTMax(strokeDevWidth, 1.f));
    }
    return strokeDevWidth;
}
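
// Worked example with hypothetical numbers: for a similarity matrix that
// scales uniformly by 2 and rotates by 30 degrees, getScaleX() = 2*cos(30deg)
// and getSkewY() = 2*sin(30deg), so SkVector::Length(getScaleX(), getSkewY())
// recovers the scale factor 2 exactly; a 3px-wide stroke then maps to
// strokeDevWidth = 6 in device space.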