/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

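// Returns true if any fragment processor in the paint uses coord transforms, in which case this
// op must preserve the original view matrix for generating local coords.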
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

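// Computes a rect's area with 64-bit math so large device-space bounds can't overflow an int.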
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

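// Public factory. Maps the shape's bounds into device space, inflates them for any stroke, and
// crops paths that exceed kPathCropThreshold before deferring to InternalMake().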
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

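// Rounds out the device-space bounds, classifies how much of the mask will be visible within the
// clip, and allocates the op from the context's op memory pool. Returns null if the shape is
// entirely clipped out.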
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        float coverage = draw->fStrokeDevWidth;

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors.
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        draw->fColor = draw->fColor * coverage;
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

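// Two GrCCDrawPathsOps can merge only if their processor sets and local-coord matrices match; the
// merged op simply concatenates the other op's draw list onto its own.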
GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

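// Registers this op with the GrCCPerOpListPaths object that tracks the CCPR draws in its op list.
// The op keeps a ref on that object for the rest of its lifetime.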
void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

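// Called at flush time. Looks each draw up in the path cache, then tallies into 'specs' the atlas
// space, A8 copies, and path stats that the per-flush resources will need to service this op.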
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;
    using CoverageType = GrCCAtlas::CoverageType;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry =
                    pathCache->find(onFlushRP, draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (draw.fCacheEntry) {
            if (const GrCCCachedAtlas* cachedAtlas = draw.fCacheEntry->cachedAtlas()) {
                SkASSERT(cachedAtlas->getOnFlushProxy());
                if (CoverageType::kA8_LiteralCoverage == cachedAtlas->coverageType()) {
                    ++specs->fNumCachedPaths;
                } else {
                    // Suggest that this path be copied to a literal coverage atlas, to save memory.
                    // (The client may decline this copy via DoCopiesToA8Coverage::kNo.)
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(
                            draw.fCacheEntry->width(), draw.fCacheEntry->height());
                    draw.fDoCopyToA8Coverage = true;
                }
                continue;
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility &&
                draw.fCacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

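// Called at flush time, after per-flush resources have been allocated. For each draw, either
// reuses (and possibly upgrades) a cached atlas mask or renders the path into the coverage count
// atlas, then appends the corresponding draw instance data.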
void GrCCDrawPathsOp::setupResources(
        GrCCPathCache* pathCache, GrOnFlushResourceProvider* onFlushRP,
        GrCCPerFlushResources* resources, DoCopiesToA8Coverage doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (cacheEntry->cachedAtlas()) {
                SkASSERT(cacheEntry->cachedAtlas()->getOnFlushProxy());
                if (DoCopiesToA8Coverage::kYes == doCopies && draw.fDoCopyToA8Coverage) {
                    resources->upgradeEntryToLiteralCoverageAtlas(pathCache, onFlushRP, cacheEntry,
                                                                  doEvenOddFill);
                    SkASSERT(cacheEntry->cachedAtlas());
                    SkASSERT(GrCCAtlas::CoverageType::kA8_LiteralCoverage
                                     == cacheEntry->cachedAtlas()->coverageType());
                    SkASSERT(cacheEntry->cachedAtlas()->getOnFlushProxy());
                }
                this->recordInstance(cacheEntry->cachedAtlas()->getOnFlushProxy(),
                                     resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two tight
        // bounding boxes: one in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderShapeInAtlas(
                    draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
                    &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors.
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor.toBytes_RGBA(), doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->cachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                cacheEntry->setCoverageCountAtlas(onFlushRP, atlas, devToAtlasOffset, devBounds,
                                                  devBounds45, devIBounds, draw.fCachedMaskShift);
            }
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

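// Tracks contiguous runs of instances that sample from the same atlas proxy. A new range begins
// whenever the atlas changes; setupResources() closes out the final range once all draws have
// been recorded.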
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
    }
}

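// Issues the actual GPU draws: one GrCCPathProcessor draw per instance range, binding that
// range's atlas texture.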
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}