blob: 0f3aef506ac06a6d0f486e0dae5ed4bc0cc57335 [file] [log] [blame]
Chris Dalton5ba36ba2018-05-09 01:08:38 -06001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrCCDrawPathsOp.h"
Brian Salomon653f42f2018-07-10 10:07:31 -04009#include "GrContext.h"
10#include "GrContextPriv.h"
Robert Phillips7c525e62018-06-12 10:11:12 -040011#include "GrMemoryPool.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060012#include "GrOpFlushState.h"
Chris Daltona2b5b642018-06-24 13:08:57 -060013#include "ccpr/GrCCPathCache.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060014#include "ccpr/GrCCPerFlushResources.h"
15#include "ccpr/GrCoverageCountingPathRenderer.h"
16
Chris Dalton1c548942018-05-22 13:09:48 -060017static bool has_coord_transforms(const GrPaint& paint) {
18 GrFragmentProcessor::Iter iter(paint);
19 while (const GrFragmentProcessor* fp = iter.next()) {
20 if (!fp->coordTransforms().empty()) {
21 return true;
22 }
23 }
24 return false;
25}
26
Chris Daltona8429cf2018-06-22 11:43:31 -060027static int64_t area(const SkIRect& r) {
28 return sk_64_mul(r.height(), r.width());
29}
30
Chris Dalton09a7bb22018-08-31 19:53:15 +080031std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
32 GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
33 GrPaint&& paint) {
Chris Dalton09a7bb22018-08-31 19:53:15 +080034 SkRect conservativeDevBounds;
35 m.mapRect(&conservativeDevBounds, shape.bounds());
36
37 const SkStrokeRec& stroke = shape.style().strokeRec();
38 float strokeDevWidth = 0;
39 float conservativeInflationRadius = 0;
40 if (!stroke.isFillStyle()) {
Chris Dalton82de18f2018-09-12 17:24:09 -060041 strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
42 m, stroke, &conservativeInflationRadius);
Chris Dalton09a7bb22018-08-31 19:53:15 +080043 conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
44 }
45
46 std::unique_ptr<GrCCDrawPathsOp> op;
47 float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
Chris Dalton82de18f2018-09-12 17:24:09 -060048 if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
Chris Dalton09a7bb22018-08-31 19:53:15 +080049 // The path is too large. Crop it or analytic AA can run out of fp32 precision.
50 SkPath croppedDevPath;
51 shape.asPath(&croppedDevPath);
52 croppedDevPath.transform(m, &croppedDevPath);
53
54 SkIRect cropBox = clipIBounds;
55 GrShape croppedDevShape;
56 if (stroke.isFillStyle()) {
57 GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
58 croppedDevShape = GrShape(croppedDevPath);
59 conservativeDevBounds = croppedDevShape.bounds();
60 } else {
61 int r = SkScalarCeilToInt(conservativeInflationRadius);
62 cropBox.outset(r, r);
63 GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
64 SkStrokeRec devStroke = stroke;
65 devStroke.setStrokeStyle(strokeDevWidth);
66 croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
67 conservativeDevBounds = croppedDevPath.getBounds();
68 conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
69 }
70
71 // FIXME: This breaks local coords: http://skbug.com/8003
72 return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
73 conservativeDevBounds, std::move(paint));
74 }
75
76 return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
77 std::move(paint));
78}
79
// Allocates the op once bounds are finalized. Classifies how much of the
// path's mask is visible inside the clip (kComplete / kMostlyComplete /
// kPartial) — a hint later used to decide whether the mask is worth caching.
// Returns null when the draw is entirely clipped out.
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        // Entire shape fits in the clip: mask covers the whole shape.
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;  // Fully clipped out; nothing to draw.
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        // >=50% visible, or small enough (<100x100) that rendering the whole
        // mask is cheap, counts as "mostly complete".
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete // i.e., visible enough to justify rendering the
                                              // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}
115
// Constructor. Snapshots the view matrix only when the paint's FPs need local
// coords, seeds the draw list with the single initial SingleDraw, and takes
// ownership of the paint's processors.
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) { // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);  // Sentinel: setupResources() not yet run.
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
130
// Destructor: unlinks this op from its owning per-opList path collection, if
// it was ever registered (see addToOwningPerOpListPaths()).
GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}
137
// Per-path draw record: captures the matrix, shape, device-space stroke
// width, bounds, visibility classification, and paint color for one path.
GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cachable paths. We do this
        // to match HWUI's cache hit ratio, which doesn't consider the matrix when caching paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}
159
// Clears the cache entry's per-flush atlas pointer on teardown.
GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}
166
// Finalizes the processor set against the clip/caps and folds the analyzed
// color back into the single draw. Additionally converts sub-pixel-width
// strokes into translucent hairlines (see comment below). Must run before the
// draw acquires a cache entry, since it can rewrite the shape (and thus its
// cache key).
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws); // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    // finalize() may overwrite draw->fColor with the clip/processor-adjusted color.
    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thin-ness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        GrColor coverageAsAlpha = GrColorPackA4(SkScalarFloorToInt(draw->fStrokeDevWidth * 255));

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        draw->fColor = SkPMColor4f::FromBytes_RGBA(
                GrColorMul(draw->fColor.toBytes_RGBA(), coverageAsAlpha));
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}
208
Brian Salomon7eae3e02018-08-07 14:02:38 +0000209GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600210 GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
Chris Daltond7e22272018-05-23 10:17:17 -0600211 SkASSERT(fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600212 SkASSERT(fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600213 SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600214 SkASSERT(that->fNumDraws);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600215
Brian Osman9aa30c62018-07-02 15:21:46 -0400216 if (fProcessors != that->fProcessors ||
Chris Dalton1c548942018-05-22 13:09:48 -0600217 fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
Brian Salomon7eae3e02018-08-07 14:02:38 +0000218 return CombineResult::kCannotCombine;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600219 }
220
Chris Daltond7e22272018-05-23 10:17:17 -0600221 fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600222
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600223 SkDEBUGCODE(fNumDraws += that->fNumDraws);
224 SkDEBUGCODE(that->fNumDraws = 0);
Brian Salomon7eae3e02018-08-07 14:02:38 +0000225 return CombineResult::kMerged;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600226}
227
// Registers this op with the per-opList path collection that will own it for
// the flush. May only be called once, before any combining has occurred.
void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}
234
// Pre-flush accounting pass: for each draw, consults the path cache and tells
// |specs| how much atlas space this op will need — either as a cached path
// (no work), a copy from last flush's stashed atlas, or a fresh render.
// May promote a mostly-visible repeat draw to fully-visible so its whole mask
// can be rendered and cached.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            // Only create a new cache entry if the mask is visible enough to
            // be worth stashing for reuse in a future flush.
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas()); // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                             cacheEntry->atlasKey(),
                             GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;  // Fully cached; no rendering or copying needed.
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    // Fills and strokes are tallied in separate buckets.
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        // Fall-through: the path must be rendered into an atlas this flush.
        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}
306
// Flush-time resource setup: emits one draw instance per path, sourcing each
// mask from (in priority order) a cached atlas texture, an atlas already used
// earlier this flush, a copy out of last flush's stashed atlas, or a fresh
// render into a coverage-count atlas. Also records instance ranges per atlas
// proxy for onExecute(), and notes masks eligible for stashing/caching.
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // setupResources() must only run once.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        // Even-odd fill only applies to filled paths; strokes always use winding.
        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor.toBytes_RGBA(),
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(
                        atlas->getOrAssignUniqueKey(onFlushRP), newOffset,
                        atlas->refOrMakeCachedAtlasInfo(onFlushRP->contextUniqueID()));
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderShapeInAtlas(
                    draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
                    &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor.toBytes_RGBA(), doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    // Close out the final open instance range (recordInstance() only closes a
    // range when the atlas changes).
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
414
Brian Salomon7eae3e02018-08-07 14:02:38 +0000415inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
Chris Dalton4da70192018-06-18 09:51:36 -0600416 if (fInstanceRanges.empty()) {
417 fInstanceRanges.push_back({atlasProxy, instanceIdx});
418 return;
419 }
420 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
421 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
422 fInstanceRanges.push_back({atlasProxy, instanceIdx});
423 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600424 }
425}
426
// Issues the actual GPU draws: builds one pipeline for the op, then draws
// each instance range with a path processor bound to that range's atlas.
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return; // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    // fProcessors is consumed here; onExecute() can only run once.
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0); // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        // Bind this range's atlas as the primitive processor's texture.
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600458}