blob: 612cc15e02d4fe8c5e4b58f3580504c41ef7c2f8 [file] [log] [blame]
Chris Dalton5ba36ba2018-05-09 01:08:38 -06001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrCCDrawPathsOp.h"
9
Robert Phillips7c525e62018-06-12 10:11:12 -040010#include "GrMemoryPool.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060011#include "GrOpFlushState.h"
12#include "ccpr/GrCCPerFlushResources.h"
13#include "ccpr/GrCoverageCountingPathRenderer.h"
14
Chris Dalton1c548942018-05-22 13:09:48 -060015static bool has_coord_transforms(const GrPaint& paint) {
16 GrFragmentProcessor::Iter iter(paint);
17 while (const GrFragmentProcessor* fp = iter.next()) {
18 if (!fp->coordTransforms().empty()) {
19 return true;
20 }
21 }
22 return false;
23}
24
Chris Daltona8429cf2018-06-22 11:43:31 -060025static int64_t area(const SkIRect& r) {
26 return sk_64_mul(r.height(), r.width());
27}
28
// Factory for GrCCDrawPathsOp. Classifies how much of the path's device-space mask
// survives the clip (kComplete / kMostlyComplete / kPartial) and allocates the op
// out of the context's op memory pool. Returns nullptr if the path is clipped out.
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
                                                       const SkIRect& clipIBounds,
                                                       const SkMatrix& m,
                                                       const GrShape& shape,
                                                       const SkRect& devBounds,
                                                       GrPaint&& paint) {
    SkIRect shapeDevIBounds;
    devBounds.roundOut(&shapeDevIBounds);  // GrCCPathParser might find slightly tighter bounds.

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeDevIBounds)) {
        // Entire path mask is inside the clip: no cropping needed.
        maskDevIBounds = shapeDevIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        // intersect() returns false when the rects don't overlap, i.e. the path is
        // fully clipped out and there is nothing to draw.
        if (!maskDevIBounds.intersect(clipIBounds, shapeDevIBounds)) {
            return nullptr;
        }
        // Heuristic: if at least half the path (or a small path, < 100x100 px) is
        // visible, treat it as "mostly complete" so it stays a caching candidate.
        int64_t unclippedArea = area(shapeDevIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete // i.e., visible enough to justify rendering the
                                              // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, shapeDevIBounds, maskDevIBounds,
                                           maskVisibility, devBounds, std::move(paint));
}
60
// Constructor. NOTE: the member-init order below is load-bearing — fDraws reads
// paint.getColor() and fViewMatrixIfUsingLocalCoords/fSRGBFlags inspect the paint,
// all before fProcessors consumes it via std::move.
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape,
                                 const SkIRect& shapeDevIBounds, const SkIRect& maskDevIBounds,
                                 Visibility maskVisibility, const SkRect& devBounds,
                                 GrPaint&& paint)
        : GrDrawOp(ClassID())
        // Only keep the view matrix if an FP actually samples local coords;
        // otherwise identity, which maximizes batching in onCombineIfPossible().
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(paint))
        , fDraws(m, shape, shapeDevIBounds, maskDevIBounds, maskVisibility, paint.getColor())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);  // Sentinel: setupResources() not yet called.
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
75
76GrCCDrawPathsOp::~GrCCDrawPathsOp() {
Chris Daltond7e22272018-05-23 10:17:17 -060077 if (fOwningPerOpListPaths) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060078 // Remove CCPR's dangling pointer to this Op before deleting it.
Chris Daltond7e22272018-05-23 10:17:17 -060079 fOwningPerOpListPaths->fDrawOps.remove(this);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060080 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060081}
82
// Records one path draw: the matrix, shape, device-space bounds of the full shape
// and of its (possibly clipped) mask, the mask's visibility class, and paint color.
GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        const SkIRect& shapeDevIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        GrColor color)
        : fMatrix(m)
        , fShape(shape)
        , fShapeDevIBounds(shapeDevIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cachable paths. We do this
        // to match HWUI's cache hit ratio, which doesn't consider the matrix when caching paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}
102
Chris Dalton4da70192018-06-18 09:51:36 -0600103GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
104 if (fCacheEntry) {
105 // All currFlushAtlas references must be reset back to null before the flush is finished.
106 fCacheEntry->setCurrFlushAtlas(nullptr);
107 }
108}
109
// Finalizes the processor set against the clip/caps. NOTE: finalize() also writes
// the possibly-overridden output color back into fDraws.head().fColor (last arg).
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip,
                                                       GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
    return RequiresDstTexture(analysis.requiresDstTexture());
}
119
// Merges a compatible GrCCDrawPathsOp into this one. Ops are combinable only when
// their sRGB flags, processor sets, and local-coord view matrices all match.
bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fSRGBFlags != that->fSRGBFlags || fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return false;
    }

    // Splice the other op's draw list onto ours (allocator-backed list append).
    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
    this->joinBounds(*that);

    // Debug-only bookkeeping: keep the draw counts consistent after the splice.
    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return true;
}
139
Chris Daltond7e22272018-05-23 10:17:17 -0600140void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
Chris Daltonf104fec2018-05-22 16:17:48 -0600141 SkASSERT(1 == fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600142 SkASSERT(!fOwningPerOpListPaths);
143 owningPerOpListPaths->fDrawOps.addToTail(this);
144 fOwningPerOpListPaths = owningPerOpListPaths;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600145}
146
// Pre-flush accounting pass: for each draw, consults the path cache and tallies
// into 'specs' whether the path will be (a) drawn from an already-cached atlas,
// (b) copied out of last flush's stashed atlas, or (c) rendered from scratch.
// Also promotes mostly-visible repeat paths to fully-visible so they can be cached.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkASSERT(!draw.fCacheEntry);

        SkPath path;
        draw.fShape.asPath(&path);

        MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
        // Only create a new cache entry when the mask is visible enough to be
        // worth stashing for reuse (kMostlyComplete or kComplete).
        bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
        draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            // Promote: seen before (hitCount > 1), mostly visible, and small enough
            // to fit a render target -> render the whole mask so it can be cached.
            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1 &&
                SkTMax(draw.fShapeDevIBounds.height(),
                       draw.fShapeDevIBounds.width()) <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly visible.
                // Just render the whole mask so we can try to cache it.
                draw.fMaskDevIBounds = draw.fShapeDevIBounds;
                draw.fMaskVisibility = Visibility::kComplete;
            }
        }

        // Fallthrough: this path will be rendered into a coverage-count atlas.
        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}
208
// Flush-time resource setup: for each draw, emits a path instance into the
// per-flush resources, sourcing the mask from (in priority order) a cached atlas
// proxy, an atlas already used earlier this flush, a copy out of last flush's
// stashed atlas, or a fresh render into a coverage-count atlas. Also records
// instance ranges per atlas proxy for onExecute().
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // setupResources() must only run once per op.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        // Cached-atlas masks already encode coverage; don't re-apply even-odd.
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fMaskDevIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    // Close out the final instance range (recordInstance() only closes a range
    // when a new atlas proxy begins).
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
310
311inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
312 if (fInstanceRanges.empty()) {
313 fInstanceRanges.push_back({atlasProxy, instanceIdx});
314 return;
315 }
316 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
317 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
318 fInstanceRanges.push_back({atlasProxy, instanceIdx});
319 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600320 }
321}
322
// Issues the actual draws: builds one pipeline from the op's processors/clip,
// then walks the recorded instance ranges, drawing each range with a path
// processor bound to that range's atlas proxy.
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    // fProcessors is consumed here; onExecute() can only run once.
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
                                   fViewMatrixIfUsingLocalCoords);
        pathProc.drawPaths(flushState, pipeline, *resources, baseInstance, range.fEndInstanceIdx,
                           this->bounds());

        // The next range picks up where this one ended.
        baseInstance = range.fEndInstanceIdx;
    }
}