/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

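// Returns true if any fragment processor on the paint uses local coordinates. If none does, the
// view matrix has no effect on the paint and can be ignored when deciding whether two ops can
// batch (see fViewMatrixIfUsingLocalCoords in the constructor below).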
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

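// Computes a rect's area in 64 bits: conservative device-space bounds can be large enough that,
// e.g., a 100,000 x 100,000 rect (1e10) would overflow 32-bit math.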
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

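// Public factory. If the path's conservative device-space bounds exceed kPathCropThreshold, the
// path is pre-transformed into device space and cropped to the clip before being handed off to
// InternalMake() below.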
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    std::unique_ptr<GrCCDrawPathsOp> op;
    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

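// Performs the common construction work: classifies the mask's visibility within the clip and
// allocates the op from the context's GrOpMemoryPool.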
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

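    // Classify how much of the mask will be visible this flush. A mask that is fully inside the
    // clip (or close to it) is worth rendering in its entirety so the result can be cached.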
    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

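// Finalizes the processor analysis for this op. At this point the op still contains exactly one
// SingleDraw; merging with other ops happens later, in onCombineIfPossible().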
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    return fDraws.head().finalize(caps, clip, &fProcessors);
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::SingleDraw::finalize(
        const GrCaps& caps, const GrAppliedClip* clip, GrProcessorSet* processors) {
    const GrProcessorSet::Analysis& analysis = processors->finalize(
            fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !fShape.style().strokeRec().isFillStyle() && fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == fShape.style().strokeRec().getStyle());

        SkPath path;
        fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
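        // E.g., a stroke with fStrokeDevWidth == 0.25 becomes a 1px hairline at 25% coverage; the
        // total deposited coverage is preserved: 0.25 * 1.0 == 1.0 * 0.25.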
        float coverage = fStrokeDevWidth;

        fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        fColor = fColor * coverage;
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

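// Two GrCCDrawPathsOps can merge as long as they use identical processors and, when local coords
// are in play, identical view matrices. Merging simply splices the other op's draw list onto ours.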
GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         GrCCPerFlushResourceSpecs* specs) {
    for (SingleDraw& draw : fDraws) {
        draw.accountForOwnPath(pathCache, onFlushRP, specs);
    }
}

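// Decides how this draw's mask will be produced during the flush (reused from a cached atlas,
// copied into a literal-coverage A8 atlas, or re-rendered from the path), and reserves the
// corresponding atlas space in 'specs'.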
void GrCCDrawPathsOp::SingleDraw::accountForOwnPath(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;
    using CoverageType = GrCCAtlas::CoverageType;

    SkPath path;
    fShape.asPath(&path);

    SkASSERT(!fCacheEntry);

    if (pathCache) {
        MaskTransform m(fMatrix, &fCachedMaskShift);
        bool canStashPathMask = fMaskVisibility >= Visibility::kMostlyComplete;
        fCacheEntry = pathCache->find(onFlushRP, fShape, m, CreateIfAbsent(canStashPathMask));
    }

    if (fCacheEntry) {
        if (const GrCCCachedAtlas* cachedAtlas = fCacheEntry->cachedAtlas()) {
            SkASSERT(cachedAtlas->getOnFlushProxy());
            if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
                ++specs->fNumCachedPaths;
            } else {
                // Suggest that this path be copied to a literal coverage atlas, to save memory.
                // (The client may decline this copy via DoCopiesToA8Coverage::kNo.)
                int idx = (fShape.style().strokeRec().isFillStyle())
                        ? GrCCPerFlushResourceSpecs::kFillIdx
                        : GrCCPerFlushResourceSpecs::kStrokeIdx;
                ++specs->fNumCopiedPaths[idx];
                specs->fCopyPathStats[idx].statPath(path);
                specs->fCopyAtlasSpecs.accountForSpace(fCacheEntry->width(), fCacheEntry->height());
                fDoCopyToA8Coverage = true;
            }
            return;
        }

        if (Visibility::kMostlyComplete == fMaskVisibility && fCacheEntry->hitCount() > 1) {
            int shapeSize = SkTMax(fShapeConservativeIBounds.height(),
                                   fShapeConservativeIBounds.width());
            if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly
                // visible. Just render the whole mask so we can try to cache it.
                fMaskDevIBounds = fShapeConservativeIBounds;
                fMaskVisibility = Visibility::kComplete;
            }
        }
    }

    int idx = (fShape.style().strokeRec().isFillStyle())
            ? GrCCPerFlushResourceSpecs::kFillIdx
            : GrCCPerFlushResourceSpecs::kStrokeIdx;
    ++specs->fNumRenderedPaths[idx];
    specs->fRenderedPathStats[idx].statPath(path);
    specs->fRenderedAtlasSpecs.accountForSpace(fMaskDevIBounds.width(),
                                               fMaskDevIBounds.height());
}

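// Runs during the flush, after accountForOwnPaths(). Writes out a path instance for each draw and
// records which atlas proxy each contiguous range of instances will sample from.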
void GrCCDrawPathsOp::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies) {
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        draw.setupResources(pathCache, onFlushRP, resources, doCopies, this);
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

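// Emits the instance data for one draw: either the instance references a mask that already lives
// in a cached atlas, or the path is rendered into this flush's coverage count atlas (and, when
// eligible, registered with the path cache for reuse in future flushes).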
void GrCCDrawPathsOp::SingleDraw::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies, GrCCDrawPathsOp* op) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;

    SkPath path;
    fShape.asPath(&path);

    auto doEvenOddFill = DoEvenOddFill(fShape.style().strokeRec().isFillStyle() &&
                                       SkPath::kEvenOdd_FillType == path.getFillType());
    SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
             SkPath::kWinding_FillType == path.getFillType());

    if (fCacheEntry) {
        // Does the path already exist in a cached atlas texture?
        if (fCacheEntry->cachedAtlas()) {
            SkASSERT(fCacheEntry->cachedAtlas()->getOnFlushProxy());
            if (DoCopiesToA8Coverage::kYes == doCopies && fDoCopyToA8Coverage) {
                resources->upgradeEntryToLiteralCoverageAtlas(pathCache, onFlushRP,
                                                              fCacheEntry.get(), doEvenOddFill);
                SkASSERT(fCacheEntry->cachedAtlas());
                SkASSERT(GrCCAtlas::CoverageType::kA8_LiteralCoverage
                         == fCacheEntry->cachedAtlas()->coverageType());
                SkASSERT(fCacheEntry->cachedAtlas()->getOnFlushProxy());
            }
            op->recordInstance(fCacheEntry->cachedAtlas()->getOnFlushProxy(),
                               resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors
            resources->appendDrawPathInstance().set(*fCacheEntry, fCachedMaskShift,
                                                    fColor.toBytes_RGBA());
            return;
        }
    }

    // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two tight
    // bounding boxes: one in device space, as well as a second one rotated an additional 45
    // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
    // circumscribes the path.
    SkRect devBounds, devBounds45;
    SkIRect devIBounds;
    SkIVector devToAtlasOffset;
    if (auto atlas = resources->renderShapeInAtlas(
                fMaskDevIBounds, fMatrix, fShape, fStrokeDevWidth, &devBounds, &devBounds45,
                &devIBounds, &devToAtlasOffset)) {
        op->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
        // TODO4F: Preserve float colors
        resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                fColor.toBytes_RGBA(), doEvenOddFill);

        // If we have a spot in the path cache, try to make a note of where this mask is so we
        // can reuse it in the future.
        if (fCacheEntry) {
            SkASSERT(!fCacheEntry->cachedAtlas());

            if (Visibility::kComplete != fMaskVisibility || fCacheEntry->hitCount() <= 1) {
                // Don't cache a path mask unless it's completely visible with a hit count > 1.
                //
                // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                // fully visible during accountForOwnPaths().
                return;
            }

            fCacheEntry->setCoverageCountAtlas(onFlushRP, atlas, devToAtlasOffset, devBounds,
                                               devBounds45, devIBounds, fCachedMaskShift);
        }
    }
}

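// Groups consecutive path instances that sample the same atlas proxy into fInstanceRanges;
// onExecute() below issues one GrCCPathProcessor draw per range.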
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}