/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

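// Returns true if any fragment processor in the paint reads local coordinates. When none do, the
// op can draw with an identity view matrix and skip local-coord bookkeeping entirely.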
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

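// Computes the rect's area in 64 bits; sk_64_mul guards against overflow when both dimensions
// approach the 32-bit limit.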
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

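// Public factory. Maps the shape into device space, accounts for stroke inflation, and crops
// device-space paths that are too large for analytic AA before handing off to InternalMake().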
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        strokeDevWidth = GrCoverageCountingPathRenderer::GetStrokeDevWidth(
                m, stroke, &conservativeInflationRadius);
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > GrCoverageCountingPathRenderer::kPathCropThreshold) {
        // The path is too large. Crop it, or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

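// Shared helper for Make(). Classifies how much of the path's mask will be visible inside the
// clip (returning null if it is clipped out entirely) and allocates the op from the context's
// GrOpMemoryPool.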
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    // The path itself should have been cropped if larger than kPathCropThreshold. If it had a
    // stroke, that would have further inflated its draw bounds.
    SkASSERT(SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width()) <
             GrCoverageCountingPathRenderer::kPathCropThreshold +
             GrCoverageCountingPathRenderer::kMaxBoundsInflationFromStroke*2 + 1);

    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

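// Note: the view matrix is only retained when some fragment processor actually reads local
// coordinates; otherwise the op draws with an identity matrix (see has_coord_transforms() above).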
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor4f())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove the list's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        const SkPMColor4f& color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}

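// Finalizes the op's color and coverage analysis against the clip and caps. As a side effect,
// strokes thinner than 1px are converted to 1px hairlines with proportionally reduced opacity.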
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thinness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
        !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        float coverage = draw->fStrokeDevWidth;

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;

        // TODO4F: Preserve float colors.
        // fShapeConservativeIBounds already accounted for the possibility of inflating the stroke.
        draw->fColor = draw->fColor * coverage;
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

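// Ops can merge only when they belong to the same per-opList path set, use identical processor
// sets, and agree on the local-coord view matrix; merging concatenates their draw lists.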
GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

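// Registers this op with the GrCCPerOpListPaths that will own it for the flush; the destructor
// unregisters it again.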
void GrCCDrawPathsOp::addToOwningPerOpListPaths(sk_sp<GrCCPerOpListPaths> owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    fOwningPerOpListPaths = std::move(owningPerOpListPaths);
    fOwningPerOpListPaths->fDrawOps.addToTail(this);
}

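// Called once per flush. For each draw, consults the path cache to decide whether the mask can be
// reused from a cached atlas, copied out of last flush's stashed atlas, or must be re-rendered,
// and tallies the space each case will need in this flush's GrCCPerFlushResourceSpecs.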
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                             cacheEntry->atlasKey(),
                             GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

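// Uploads this op's instance data into the per-flush resources, handling each draw the same way
// accountForOwnPaths() classified it: reuse a cached atlas, reuse an atlas already rendered this
// flush, copy from the stashed atlas, or render the mask from scratch.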
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor.toBytes_RGBA(),
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(
                        atlas->getOrAssignUniqueKey(onFlushRP), newOffset,
                        atlas->refOrMakeCachedAtlasInfo(onFlushRP->contextUniqueID()));
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                // TODO4F: Preserve float colors.
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor.toBytes_RGBA());
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two
        // tight bounding boxes: one in device space, as well as a second one rotated an
        // additional 45 degrees. The path vertex shader uses these two bounding boxes to generate
        // an octagon that circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderShapeInAtlas(
                    draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
                    &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            // TODO4F: Preserve float colors.
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor.toBytes_RGBA(), doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

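// Records that the instances from 'instanceIdx' onward draw from 'atlasProxy'. Consecutive
// instances that share an atlas accumulate into a single InstanceRange; the final range is closed
// out by setupResources() once all draws have been appended.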
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

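// Issues the actual draws. Builds one pipeline for the op, then walks the recorded instance
// ranges, binding each range's atlas texture and drawing its contiguous block of path instances.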
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState, const SkRect& chainBounds) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}