blob: 3f709592e0463dcf9fc37c19487a94625ddb6c4c [file] [log] [blame]
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8#include "GrCCDrawPathsOp.h"
9
Robert Phillips7c525e62018-06-12 10:11:12 -040010#include "GrMemoryPool.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060011#include "GrOpFlushState.h"
Chris Daltona2b5b642018-06-24 13:08:57 -060012#include "ccpr/GrCCPathCache.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060013#include "ccpr/GrCCPerFlushResources.h"
14#include "ccpr/GrCoverageCountingPathRenderer.h"
15
Chris Dalton1c548942018-05-22 13:09:48 -060016static bool has_coord_transforms(const GrPaint& paint) {
17 GrFragmentProcessor::Iter iter(paint);
18 while (const GrFragmentProcessor* fp = iter.next()) {
19 if (!fp->coordTransforms().empty()) {
20 return true;
21 }
22 }
23 return false;
24}
25
Chris Daltona8429cf2018-06-22 11:43:31 -060026static int64_t area(const SkIRect& r) {
27 return sk_64_mul(r.height(), r.width());
28}
29
Robert Phillipsc994a932018-06-19 13:09:54 -040030std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
31 const SkIRect& clipIBounds,
32 const SkMatrix& m,
33 const GrShape& shape,
34 const SkRect& devBounds,
35 GrPaint&& paint) {
Chris Daltona8429cf2018-06-22 11:43:31 -060036 SkIRect shapeDevIBounds;
37 devBounds.roundOut(&shapeDevIBounds); // GrCCPathParser might find slightly tighter bounds.
38
39 SkIRect maskDevIBounds;
40 Visibility maskVisibility;
41 if (clipIBounds.contains(shapeDevIBounds)) {
42 maskDevIBounds = shapeDevIBounds;
43 maskVisibility = Visibility::kComplete;
44 } else {
45 if (!maskDevIBounds.intersect(clipIBounds, shapeDevIBounds)) {
Chris Dalton4da70192018-06-18 09:51:36 -060046 return nullptr;
47 }
Chris Daltona8429cf2018-06-22 11:43:31 -060048 int64_t unclippedArea = area(shapeDevIBounds);
49 int64_t clippedArea = area(maskDevIBounds);
50 maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
51 ? Visibility::kMostlyComplete // i.e., visible enough to justify rendering the
52 // whole thing if we think we can cache it.
53 : Visibility::kPartial;
Chris Dalton42c21152018-06-13 15:28:19 -060054 }
Robert Phillipsc994a932018-06-19 13:09:54 -040055
56 GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();
57
Chris Daltona8429cf2018-06-22 11:43:31 -060058 return pool->allocate<GrCCDrawPathsOp>(m, shape, shapeDevIBounds, maskDevIBounds,
59 maskVisibility, devBounds, std::move(paint));
Chris Dalton42c21152018-06-13 15:28:19 -060060}
61
// Constructs an op holding a single path draw. The view matrix is only retained (for local
// coords) when the paint's fragment processors actually use coord transforms; otherwise identity
// is stored so ops drawn under unrelated matrices can still combine.
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape,
                                 const SkIRect& shapeDevIBounds, const SkIRect& maskDevIBounds,
                                 Visibility maskVisibility, const SkRect& devBounds,
                                 GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        // fDraws reads paint.getColor(); init order here matters relative to fProcessors below.
        , fDraws(m, shape, shapeDevIBounds, maskDevIBounds, maskVisibility, paint.getColor())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
75
76GrCCDrawPathsOp::~GrCCDrawPathsOp() {
Chris Daltond7e22272018-05-23 10:17:17 -060077 if (fOwningPerOpListPaths) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060078 // Remove CCPR's dangling pointer to this Op before deleting it.
Chris Daltond7e22272018-05-23 10:17:17 -060079 fOwningPerOpListPaths->fDrawOps.remove(this);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060080 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060081}
82
// Captures everything needed to draw one path: its matrix, geometry, device-space integer bounds,
// how much of the mask survives the clip, and the paint color at record time.
GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        const SkIRect& shapeDevIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        GrColor color)
        : fMatrix(m)
        , fShape(shape)
        , fShapeDevIBounds(shapeDevIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cachable paths. We do this
        // to match HWUI's cache hit ratio, which doesn't consider the matrix when caching paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}
102
Chris Dalton4da70192018-06-18 09:51:36 -0600103GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
104 if (fCacheEntry) {
105 // All currFlushAtlas references must be reset back to null before the flush is finished.
106 fCacheEntry->setCurrFlushAtlas(nullptr);
107 }
108}
109
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600110GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
111 const GrAppliedClip* clip,
112 GrPixelConfigIsClamped dstIsClamped) {
Chris Dalton4da70192018-06-18 09:51:36 -0600113 SkASSERT(1 == fNumDraws); // There should only be one single path draw in this Op right now.
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600114 GrProcessorSet::Analysis analysis =
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600115 fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
116 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
Chris Dalton4da70192018-06-18 09:51:36 -0600117 return RequiresDstTexture(analysis.requiresDstTexture());
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600118}
119
Chris Dalton4da70192018-06-18 09:51:36 -0600120bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600121 GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
Chris Daltond7e22272018-05-23 10:17:17 -0600122 SkASSERT(fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600123 SkASSERT(fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600124 SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600125 SkASSERT(that->fNumDraws);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600126
Brian Osman9aa30c62018-07-02 15:21:46 -0400127 if (fProcessors != that->fProcessors ||
Chris Dalton1c548942018-05-22 13:09:48 -0600128 fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600129 return false;
130 }
131
Chris Daltond7e22272018-05-23 10:17:17 -0600132 fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600133 this->joinBounds(*that);
134
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600135 SkDEBUGCODE(fNumDraws += that->fNumDraws);
136 SkDEBUGCODE(that->fNumDraws = 0);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600137 return true;
138}
139
Chris Daltond7e22272018-05-23 10:17:17 -0600140void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
Chris Daltonf104fec2018-05-22 16:17:48 -0600141 SkASSERT(1 == fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600142 SkASSERT(!fOwningPerOpListPaths);
143 owningPerOpListPaths->fDrawOps.addToTail(this);
144 fOwningPerOpListPaths = owningPerOpListPaths;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600145}
146
// Pre-flush accounting pass: for each draw, decides whether its mask will come from a cached
// atlas, be copied out of last flush's stashed atlas, or be rendered fresh this flush, and tallies
// the corresponding counts/space into 'specs' so per-flush resources can be sized up front.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            // Only create a new cache entry if the mask is complete enough to be worth stashing.
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            // Promote mostly-visible, repeatedly-drawn paths to fully visible so the whole mask
            // gets rendered (and can therefore be cached), as long as it fits in a render target.
            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1 &&
                SkTMax(draw.fShapeDevIBounds.height(),
                       draw.fShapeDevIBounds.width()) <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly visible.
                // Just render the whole mask so we can try to cache it.
                draw.fMaskDevIBounds = draw.fShapeDevIBounds;
                draw.fMaskVisibility = Visibility::kComplete;
            }
        }

        // Fell through every cache path above: this mask will be rendered during this flush.
        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}
210
// Flush-time resource setup: emits one draw-path instance per SingleDraw, sourcing each mask from
// (in priority order) a cached atlas, an atlas already built earlier this flush, a copy out of
// last flush's stashed atlas, or a freshly rendered coverage-count atlas. Also records which
// atlas each instance range draws from (see recordInstance()).
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // setupResources() must only run once per op.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        // 8-bit cached masks are already resolved; don't re-apply even-odd fill.
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fMaskDevIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
        // NOTE(review): if renderPathInAtlas() fails, the draw is silently skipped — presumably
        // intentional (atlas out of room / degenerate path); confirm against callers.
    }

    if (!fInstanceRanges.empty()) {
        // Close out the final instance range now that all instances have been appended.
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
312
313inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
314 if (fInstanceRanges.empty()) {
315 fInstanceRanges.push_back({atlasProxy, instanceIdx});
316 return;
317 }
318 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
319 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
320 fInstanceRanges.push_back({atlasProxy, instanceIdx});
321 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600322 }
323}
324
// Issues the actual GPU draws: builds one pipeline for the whole op, then draws each recorded
// instance range with a path processor bound to that range's atlas texture.
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    // The scissor rect must be captured before the clip is consumed by the pipeline below.
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    // fProcessors is moved into the pipeline here; the op must not touch it again afterward.
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
                                   fViewMatrixIfUsingLocalCoords);
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        // The next range picks up where this one left off.
        baseInstance = range.fEndInstanceIdx;
    }
}
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600355}