/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPathCache.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

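// Returns true if any fragment processor in the paint uses local coordinates. When none do, this
// op can substitute the identity for its view matrix, which lets more draws merge in
// onCombineIfPossible().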
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

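// Computes the rect's area in 64 bits so that very large mask bounds can't overflow an int32.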
static int64_t area(const SkIRect& r) {
    return sk_64_mul(r.height(), r.width());
}

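// Paths whose conservative device-space size exceeds kPathCropThreshold get pre-cropped to the
// clip bounds here (outset by the stroke radius for stroked shapes), since analytic AA can run
// out of fp32 precision on very large coordinates.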
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        GrPaint&& paint) {
    static constexpr int kPathCropThreshold = GrCoverageCountingPathRenderer::kPathCropThreshold;

    SkRect conservativeDevBounds;
    m.mapRect(&conservativeDevBounds, shape.bounds());

    const SkStrokeRec& stroke = shape.style().strokeRec();
    float strokeDevWidth = 0;
    float conservativeInflationRadius = 0;
    if (!stroke.isFillStyle()) {
        if (stroke.isHairlineStyle()) {
            strokeDevWidth = 1;
        } else {
            SkASSERT(m.isSimilarity());  // Otherwise matrixScaleFactor = m.getMaxScale().
            float matrixScaleFactor = SkVector::Length(m.getScaleX(), m.getSkewY());
            strokeDevWidth = stroke.getWidth() * matrixScaleFactor;
        }
        // Inflate for a minimum stroke width of 1. In some cases when the stroke is less than 1px
        // wide, we may inflate it to 1px and instead reduce the opacity.
        conservativeInflationRadius = SkStrokeRec::GetInflationRadius(
                stroke.getJoin(), stroke.getMiter(), stroke.getCap(), SkTMax(strokeDevWidth, 1.f));
        conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
    }

    std::unique_ptr<GrCCDrawPathsOp> op;
    float conservativeSize = SkTMax(conservativeDevBounds.height(), conservativeDevBounds.width());
    if (conservativeSize > kPathCropThreshold) {
        // The path is too large. Crop it or analytic AA can run out of fp32 precision.
        SkPath croppedDevPath;
        shape.asPath(&croppedDevPath);
        croppedDevPath.transform(m, &croppedDevPath);

        SkIRect cropBox = clipIBounds;
        GrShape croppedDevShape;
        if (stroke.isFillStyle()) {
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            croppedDevShape = GrShape(croppedDevPath);
            conservativeDevBounds = croppedDevShape.bounds();
        } else {
            int r = SkScalarCeilToInt(conservativeInflationRadius);
            cropBox.outset(r, r);
            GrCoverageCountingPathRenderer::CropPath(croppedDevPath, cropBox, &croppedDevPath);
            SkStrokeRec devStroke = stroke;
            devStroke.setStrokeStyle(strokeDevWidth);
            croppedDevShape = GrShape(croppedDevPath, GrStyle(devStroke, nullptr));
            conservativeDevBounds = croppedDevPath.getBounds();
            conservativeDevBounds.outset(conservativeInflationRadius, conservativeInflationRadius);
        }

        // FIXME: This breaks local coords: http://skbug.com/8003
        return InternalMake(context, clipIBounds, SkMatrix::I(), croppedDevShape, strokeDevWidth,
                            conservativeDevBounds, std::move(paint));
    }

    return InternalMake(context, clipIBounds, m, shape, strokeDevWidth, conservativeDevBounds,
                        std::move(paint));
}

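// Classifies how much of the mask the clip leaves visible (complete, mostly complete, or
// partial), then allocates the op from the context's GrOpMemoryPool. Returns null if the draw is
// clipped out entirely.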
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::InternalMake(
        GrContext* context, const SkIRect& clipIBounds, const SkMatrix& m, const GrShape& shape,
        float strokeDevWidth, const SkRect& conservativeDevBounds, GrPaint&& paint) {
    SkIRect shapeConservativeIBounds;
    conservativeDevBounds.roundOut(&shapeConservativeIBounds);

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeConservativeIBounds)) {
        maskDevIBounds = shapeConservativeIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        if (!maskDevIBounds.intersect(clipIBounds, shapeConservativeIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeConservativeIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, strokeDevWidth, shapeConservativeIBounds,
                                           maskDevIBounds, maskVisibility, conservativeDevBounds,
                                           std::move(paint));
}

GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape, float strokeDevWidth,
                                 const SkIRect& shapeConservativeIBounds,
                                 const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                 const SkRect& conservativeDevBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, strokeDevWidth, shapeConservativeIBounds, maskDevIBounds, maskVisibility,
                 paint.getColor())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(conservativeDevBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        float strokeDevWidth,
                                        const SkIRect& shapeConservativeIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        GrColor color)
        : fMatrix(m)
        , fShape(shape)
        , fStrokeDevWidth(strokeDevWidth)
        , fShapeConservativeIBounds(shapeConservativeIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}

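// Runs processor analysis now that the final paint and clip are known. As a side effect, strokes
// thinner than 1px may be converted to hairlines whose color is modulated to mimic the original
// thin-ness (see the comment below).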
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    SingleDraw* draw = &fDraws.head();

    const GrProcessorSet::Analysis& analysis = fProcessors.finalize(
            draw->fColor, GrProcessorAnalysisCoverage::kSingleChannel, clip, false, caps,
            &draw->fColor);

    // Lines start looking jagged when they get thinner than 1px. For thin strokes it looks better
    // if we can convert them to hairline (i.e., inflate the stroke width to 1px), and instead
    // reduce the opacity to create the illusion of thin-ness. This strategy also helps reduce
    // artifacts from coverage dilation when there are self intersections.
    if (analysis.isCompatibleWithCoverageAsAlpha() &&
            !draw->fShape.style().strokeRec().isFillStyle() && draw->fStrokeDevWidth < 1) {
        // Modifying the shape affects its cache key. The draw can't have a cache entry yet or else
        // our next step would invalidate it.
        SkASSERT(!draw->fCacheEntry);
        SkASSERT(SkStrokeRec::kStroke_Style == draw->fShape.style().strokeRec().getStyle());

        SkPath path;
        draw->fShape.asPath(&path);

        // Create a hairline version of our stroke.
        SkStrokeRec hairlineStroke = draw->fShape.style().strokeRec();
        hairlineStroke.setStrokeStyle(0);

        // How transparent does a 1px stroke have to be in order to appear as thin as the real one?
        GrColor coverageAsAlpha = GrColorPackA4(SkScalarFloorToInt(draw->fStrokeDevWidth * 255));

        draw->fShape = GrShape(path, GrStyle(hairlineStroke, nullptr));
        draw->fStrokeDevWidth = 1;
        // fShapeConservativeIBounds already accounted for this possibility of inflating the stroke.
        draw->fColor = GrColorMul(draw->fColor, coverageAsAlpha);
    }

    return RequiresDstTexture(analysis.requiresDstTexture());
}

GrOp::CombineResult GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fProcessors != that->fProcessors ||
            fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return CombineResult::kCannotCombine;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
    this->joinBounds(*that);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return CombineResult::kMerged;
}

void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    owningPerOpListPaths->fDrawOps.addToTail(this);
    fOwningPerOpListPaths = owningPerOpListPaths;
}

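// First of two per-flush passes over the draws: looks each path up in the cache and tallies the
// atlas space it will need (cached, copied, or re-rendered) into the GrCCPerFlushResourceSpecs,
// so per-flush resources can be sized before setupResources() records any instances.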
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                        (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                cacheEntry->atlasKey(),
                                GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    int idx = (draw.fShape.style().strokeRec().isFillStyle())
                            ? GrCCPerFlushResourceSpecs::kFillIdx
                            : GrCCPerFlushResourceSpecs::kStrokeIdx;
                    ++specs->fNumCopiedPaths[idx];
                    specs->fCopyPathStats[idx].statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1) {
                int shapeSize = SkTMax(draw.fShapeConservativeIBounds.height(),
                                       draw.fShapeConservativeIBounds.width());
                if (shapeSize <= onFlushRP->caps()->maxRenderTargetSize()) {
                    // We've seen this path before with a compatible matrix, and it's mostly
                    // visible. Just render the whole mask so we can try to cache it.
                    draw.fMaskDevIBounds = draw.fShapeConservativeIBounds;
                    draw.fMaskVisibility = Visibility::kComplete;
                }
            }
        }

        int idx = (draw.fShape.style().strokeRec().isFillStyle())
                ? GrCCPerFlushResourceSpecs::kFillIdx
                : GrCCPerFlushResourceSpecs::kStrokeIdx;
        ++specs->fNumRenderedPaths[idx];
        specs->fRenderedPathStats[idx].statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}

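// Second per-flush pass: emits a path instance for each draw, pulling the mask from a cached
// atlas when possible, copying last flush's stashed atlas into the cache when directed to, and
// otherwise rendering the shape into a new coverage count atlas.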
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(draw.fShape.style().strokeRec().isFillStyle() &&
                                           SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                onFlushRP->contextUniqueID(), newOffset,
                                                atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderShapeInAtlas() gives us two
        // tight bounding boxes: one in device space, as well as a second one rotated an
        // additional 45 degrees. The path vertex shader uses these two bounding boxes to generate
        // an octagon that circumscribes the path.
369 SkASSERT(!draw.fCachedAtlasProxy);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600370 SkRect devBounds, devBounds45;
Chris Dalton4da70192018-06-18 09:51:36 -0600371 SkIRect devIBounds;
Chris Dalton9414c962018-06-14 10:14:50 -0600372 SkIVector devToAtlasOffset;
Chris Dalton09a7bb22018-08-31 19:53:15 +0800373 if (auto atlas = resources->renderShapeInAtlas(
374 draw.fMaskDevIBounds, draw.fMatrix, draw.fShape, draw.fStrokeDevWidth,
375 &devBounds, &devBounds45, &devIBounds, &devToAtlasOffset)) {
Chris Dalton4da70192018-06-18 09:51:36 -0600376 this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
377 resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
378 draw.fColor, doEvenOddFill);
Chris Daltona8429cf2018-06-22 11:43:31 -0600379
380 // If we have a spot in the path cache, try to make a note of where this mask is so we
381 // can reuse it in the future.
382 if (auto cacheEntry = draw.fCacheEntry.get()) {
383 SkASSERT(!cacheEntry->hasCachedAtlas());
384
385 if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
386 // Don't cache a path mask unless it's completely visible with a hit count > 1.
387 //
388 // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
389 // fully visible during accountForOwnPaths().
390 continue;
391 }
392
393 if (resources->nextAtlasToStash() != atlas) {
394 // This mask does not belong to the atlas that will be stashed for next flush.
395 continue;
396 }
397
Chris Dalton4da70192018-06-18 09:51:36 -0600398 const GrUniqueKey& atlasKey =
399 resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
Brian Salomon238069b2018-07-11 15:58:57 -0400400 cacheEntry->initAsStashedAtlas(atlasKey, onFlushRP->contextUniqueID(),
401 devToAtlasOffset, devBounds, devBounds45, devIBounds,
402 draw.fCachedMaskShift);
Chris Dalton4da70192018-06-18 09:51:36 -0600403 // Remember this atlas in case we encounter the path again during the same flush.
Chris Daltona8429cf2018-06-22 11:43:31 -0600404 cacheEntry->setCurrFlushAtlas(atlas);
Chris Dalton4da70192018-06-18 09:51:36 -0600405 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600406 continue;
407 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600408 }
409
Chris Dalton4da70192018-06-18 09:51:36 -0600410 if (!fInstanceRanges.empty()) {
411 fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
412 }
413}
414
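// Instances are laid out contiguously per atlas. If this instance uses a different atlas than the
// previous one, close out the current range and begin a new one; the final range is closed at the
// end of setupResources().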
inline void GrCCDrawPathsOp::recordInstance(GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

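// Issues one GrCCPathProcessor draw per instance range, binding that range's atlas texture as the
// primitive processor texture.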
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(range.fAtlasProxy, fViewMatrixIfUsingLocalCoords);
        GrTextureProxy* atlasProxy = range.fAtlasProxy;
        fixedDynamicState.fPrimitiveProcessorTextures = &atlasProxy;
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}