blob: 042a45bb4efd2f12649c3a762793cece9b6be6a4 [file] [log] [blame]
Chris Dalton5ba36ba2018-05-09 01:08:38 -06001/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "GrCCDrawPathsOp.h"
Brian Salomon653f42f2018-07-10 10:07:31 -04009#include "GrContext.h"
10#include "GrContextPriv.h"
Robert Phillips7c525e62018-06-12 10:11:12 -040011#include "GrMemoryPool.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060012#include "GrOpFlushState.h"
Chris Daltona2b5b642018-06-24 13:08:57 -060013#include "ccpr/GrCCPathCache.h"
Chris Dalton5ba36ba2018-05-09 01:08:38 -060014#include "ccpr/GrCCPerFlushResources.h"
15#include "ccpr/GrCoverageCountingPathRenderer.h"
16
Chris Dalton1c548942018-05-22 13:09:48 -060017static bool has_coord_transforms(const GrPaint& paint) {
18 GrFragmentProcessor::Iter iter(paint);
19 while (const GrFragmentProcessor* fp = iter.next()) {
20 if (!fp->coordTransforms().empty()) {
21 return true;
22 }
23 }
24 return false;
25}
26
Chris Daltona8429cf2018-06-22 11:43:31 -060027static int64_t area(const SkIRect& r) {
28 return sk_64_mul(r.height(), r.width());
29}
30
// Factory. Rounds the path's device-space bounds out to integers, intersects them with
// the clip, classifies how much of the mask will be visible (which later drives the
// render-whole-mask-and-cache heuristic), and allocates the op from the context's op
// memory pool. Returns null if the path is entirely clipped out.
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
                                                       const SkIRect& clipIBounds,
                                                       const SkMatrix& m,
                                                       const GrShape& shape,
                                                       const SkRect& devBounds,
                                                       GrPaint&& paint) {
    SkIRect shapeDevIBounds;
    devBounds.roundOut(&shapeDevIBounds);  // GrCCPathParser might find slightly tighter bounds.

    SkIRect maskDevIBounds;
    Visibility maskVisibility;
    if (clipIBounds.contains(shapeDevIBounds)) {
        // Nothing is clipped away; the mask covers the full path bounds.
        maskDevIBounds = shapeDevIBounds;
        maskVisibility = Visibility::kComplete;
    } else {
        // Two-arg intersect writes clipIBounds ∩ shapeDevIBounds into maskDevIBounds and
        // returns false when the intersection is empty (i.e. fully clipped out).
        if (!maskDevIBounds.intersect(clipIBounds, shapeDevIBounds)) {
            return nullptr;
        }
        int64_t unclippedArea = area(shapeDevIBounds);
        int64_t clippedArea = area(maskDevIBounds);
        // "Mostly complete" if at least half the area survives the clip, or the path is
        // small enough (< 100x100 px) that rendering it whole is cheap anyway.
        maskVisibility = (clippedArea >= unclippedArea/2 || unclippedArea < 100*100)
                ? Visibility::kMostlyComplete  // i.e., visible enough to justify rendering the
                                               // whole thing if we think we can cache it.
                : Visibility::kPartial;
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(m, shape, shapeDevIBounds, maskDevIBounds,
                                           maskVisibility, devBounds, std::move(paint));
}
62
// Constructor. Stores the single initial draw, captures the paint's color before the
// paint is moved into fProcessors, and sets conservative op bounds for batching.
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkMatrix& m, const GrShape& shape,
                                 const SkIRect& shapeDevIBounds, const SkIRect& maskDevIBounds,
                                 Visibility maskVisibility, const SkRect& devBounds,
                                 GrPaint&& paint)
        : GrDrawOp(ClassID())
        // Only remember the view matrix when the paint actually samples local coords;
        // using identity otherwise lets ops with different matrices combine.
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fDraws(m, shape, shapeDevIBounds, maskDevIBounds, maskVisibility, paint.getColor())
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);  // Sentinel: setupResources() has not run yet.
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
76
77GrCCDrawPathsOp::~GrCCDrawPathsOp() {
Chris Daltond7e22272018-05-23 10:17:17 -060078 if (fOwningPerOpListPaths) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060079 // Remove CCPR's dangling pointer to this Op before deleting it.
Chris Daltond7e22272018-05-23 10:17:17 -060080 fOwningPerOpListPaths->fDrawOps.remove(this);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060081 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060082}
83
// Records one path draw: its matrix, shape, device-space bounds, clipped mask bounds,
// visibility classification, and paint color.
GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkMatrix& m, const GrShape& shape,
                                        const SkIRect& shapeDevIBounds,
                                        const SkIRect& maskDevIBounds, Visibility maskVisibility,
                                        GrColor color)
        : fMatrix(m)
        , fShape(shape)
        , fShapeDevIBounds(shapeDevIBounds)
        , fMaskDevIBounds(maskDevIBounds)
        , fMaskVisibility(maskVisibility)
        , fColor(color) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cachable paths. We do this
        // to match HWUI's cache hit ratio, which doesn't consider the matrix when caching paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}
103
Chris Dalton4da70192018-06-18 09:51:36 -0600104GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
105 if (fCacheEntry) {
106 // All currFlushAtlas references must be reset back to null before the flush is finished.
107 fCacheEntry->setCurrFlushAtlas(nullptr);
108 }
109}
110
// GrDrawOp override. Finalizes the processor set against the op's single draw (combining
// happens later), letting the analysis overwrite fColor with the possibly-simplified
// input color, and reports whether a dst texture is required for blending.
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip,
                                                       GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
    return RequiresDstTexture(analysis.requiresDstTexture());
}
120
Chris Dalton4da70192018-06-18 09:51:36 -0600121bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600122 GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
Chris Daltond7e22272018-05-23 10:17:17 -0600123 SkASSERT(fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600124 SkASSERT(fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600125 SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600126 SkASSERT(that->fNumDraws);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600127
Brian Osman9aa30c62018-07-02 15:21:46 -0400128 if (fProcessors != that->fProcessors ||
Chris Dalton1c548942018-05-22 13:09:48 -0600129 fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600130 return false;
131 }
132
Chris Daltond7e22272018-05-23 10:17:17 -0600133 fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600134 this->joinBounds(*that);
135
Chris Dalton4bfb50b2018-05-21 09:10:53 -0600136 SkDEBUGCODE(fNumDraws += that->fNumDraws);
137 SkDEBUGCODE(that->fNumDraws = 0);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600138 return true;
139}
140
Chris Daltond7e22272018-05-23 10:17:17 -0600141void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
Chris Daltonf104fec2018-05-22 16:17:48 -0600142 SkASSERT(1 == fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600143 SkASSERT(!fOwningPerOpListPaths);
144 owningPerOpListPaths->fDrawOps.addToTail(this);
145 fOwningPerOpListPaths = owningPerOpListPaths;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600146}
147
// Pre-flush accounting pass. For every draw, consults the path cache and tallies into
// 'specs' which of three buckets the path falls in: already cached in an atlas texture,
// copyable out of last flush's stashed atlas, or in need of a fresh render. Also promotes
// mostly-visible, repeatedly-seen paths to fully-visible so their whole mask can be
// rendered and cached.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        SkASSERT(!draw.fCacheEntry);  // This pass runs once per flush, before setupResources().

        if (pathCache) {
            MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
            // Only create a new cache entry when the mask is visible enough to be worth
            // stashing for reuse next flush.
            bool canStashPathMask = draw.fMaskVisibility >= Visibility::kMostlyComplete;
            draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(canStashPathMask));
        }

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().

            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            // Promote to a full-mask render if the path is mostly visible, has been seen
            // before (hit count > 1), and the whole mask fits in a render target.
            if (Visibility::kMostlyComplete == draw.fMaskVisibility && cacheEntry->hitCount() > 1 &&
                SkTMax(draw.fShapeDevIBounds.height(),
                       draw.fShapeDevIBounds.width()) <= onFlushRP->caps()->maxRenderTargetSize()) {
                // We've seen this path before with a compatible matrix, and it's mostly visible.
                // Just render the whole mask so we can try to cache it.
                draw.fMaskDevIBounds = draw.fShapeDevIBounds;
                draw.fMaskVisibility = Visibility::kComplete;
            }
        }

        // Fall-through: this path's mask must be rendered from scratch this flush.
        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fMaskDevIBounds.width(),
                                                   draw.fMaskDevIBounds.height());
    }
}
211
// Flush-time resource setup. Emits one path instance per draw, sourcing each mask in
// priority order: (1) an existing cached atlas texture, (2) an atlas already used earlier
// in this same flush, (3) a copy out of last flush's stashed atlas into a permanent 8-bit
// atlas, or (4) a fresh coverage-count render. Instances are grouped into contiguous
// ranges per atlas proxy via recordInstance().
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // setupResources() must only run once.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        // A cached (8-bit) atlas mask is already resolved coverage, so
                        // even-odd handling no longer applies.
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fMaskDevIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);

            // If we have a spot in the path cache, try to make a note of where this mask is so we
            // can reuse it in the future.
            if (auto cacheEntry = draw.fCacheEntry.get()) {
                SkASSERT(!cacheEntry->hasCachedAtlas());

                if (Visibility::kComplete != draw.fMaskVisibility || cacheEntry->hitCount() <= 1) {
                    // Don't cache a path mask unless it's completely visible with a hit count > 1.
                    //
                    // NOTE: mostly-visible paths with a hit count > 1 should have been promoted to
                    // fully visible during accountForOwnPaths().
                    continue;
                }

                if (resources->nextAtlasToStash() != atlas) {
                    // This mask does not belong to the atlas that will be stashed for next flush.
                    continue;
                }

                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                cacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds, devBounds45,
                                               devIBounds, draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    // Close out the final instance range, if any instances were recorded.
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
313
314inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
315 if (fInstanceRanges.empty()) {
316 fInstanceRanges.push_back({atlasProxy, instanceIdx});
317 return;
318 }
319 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
320 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
321 fInstanceRanges.push_back({atlasProxy, instanceIdx});
322 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600323 }
324}
325
// GrOp override. Builds the pipeline once, then issues one GrCCPathProcessor draw per
// recorded instance range (one per atlas proxy). Bails out silently if per-flush
// resource setup failed.
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    auto clip = flushState->detachAppliedClip();
    GrPipeline::FixedDynamicState fixedDynamicState(clip.scissorState().rect());
    // fProcessors is consumed here; onExecute can only run once.
    GrPipeline pipeline(initArgs, std::move(fProcessors), std::move(clip));

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
                                   fViewMatrixIfUsingLocalCoords);
        pathProc.drawPaths(flushState, pipeline, &fixedDynamicState, *resources, baseInstance,
                           range.fEndInstanceIdx, this->bounds());

        // The next range picks up where this one left off.
        baseInstance = range.fEndInstanceIdx;
    }
}