/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"

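// Returns true if any fragment processor in the paint uses local coordinates. If none do, the op
// can substitute the identity matrix for the view matrix, which allows more draws to batch.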
static bool has_coord_transforms(const GrPaint& paint) {
    GrFragmentProcessor::Iter iter(paint);
    while (const GrFragmentProcessor* fp = iter.next()) {
        if (!fp->coordTransforms().empty()) {
            return true;
        }
    }
    return false;
}

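// Clips the path's rounded-out device bounds against clipIBounds. A path whose bounds get clipped
// cannot have its mask stashed for reuse in a future flush; a path that is clipped out entirely
// yields no op at all.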
std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext* context,
                                                       const SkIRect& clipIBounds,
                                                       const SkMatrix& m,
                                                       const GrShape& shape,
                                                       const SkRect& devBounds,
                                                       GrPaint&& paint) {
    bool canStashPathMask = true;
    SkIRect looseClippedIBounds;
    devBounds.roundOut(&looseClippedIBounds);  // GrCCPathParser might find slightly tighter bounds.
    if (!clipIBounds.contains(looseClippedIBounds)) {
        canStashPathMask = false;
        if (!looseClippedIBounds.intersect(clipIBounds)) {
            return nullptr;
        }
    }

    GrOpMemoryPool* pool = context->contextPriv().opMemoryPool();

    return pool->allocate<GrCCDrawPathsOp>(looseClippedIBounds, m, shape, canStashPathMask,
                                           devBounds, std::move(paint));
}

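// The op starts out with a single draw; onCombineIfPossible() may append more. The view matrix is
// only retained when the paint actually uses local coordinates.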
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkIRect& looseClippedIBounds, const SkMatrix& m,
                                 const GrShape& shape, bool canStashPathMask,
                                 const SkRect& devBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(paint))
        , fDraws(looseClippedIBounds, m, shape, paint.getColor(), canStashPathMask)
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}

GrCCDrawPathsOp::~GrCCDrawPathsOp() {
    if (fOwningPerOpListPaths) {
        // Remove CCPR's dangling pointer to this Op before deleting it.
        fOwningPerOpListPaths->fDrawOps.remove(this);
    }
}

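// A SingleDraw records everything needed to draw one path: its shape, matrix, color,
// conservatively clipped device-space bounds, and whether its mask is eligible for stashing.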
GrCCDrawPathsOp::SingleDraw::SingleDraw(const SkIRect& clippedDevIBounds, const SkMatrix& m,
                                        const GrShape& shape, GrColor color, bool canStashPathMask)
        : fLooseClippedIBounds(clippedDevIBounds)
        , fMatrix(m)
        , fShape(shape)
        , fColor(color)
        , fCanStashPathMask(canStashPathMask) {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    if (fShape.hasUnstyledKey()) {
        // On AOSP we round view matrix translates to integer values for cacheable paths. We do
        // this to match HWUI's cache hit ratio, which doesn't consider the matrix when caching
        // paths.
        fMatrix.setTranslateX(SkScalarRoundToScalar(fMatrix.getTranslateX()));
        fMatrix.setTranslateY(SkScalarRoundToScalar(fMatrix.getTranslateY()));
    }
#endif
}

GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
    if (fCacheEntry) {
        // All currFlushAtlas references must be reset back to null before the flush is finished.
        fCacheEntry->setCurrFlushAtlas(nullptr);
    }
}

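// Finalizes the processor set against this op's single draw. The analysis may overwrite the
// draw's color in place (via the override-color out-param below).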
GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
                                                       const GrAppliedClip* clip,
                                                       GrPixelConfigIsClamped dstIsClamped) {
    SkASSERT(1 == fNumDraws);  // There should only be one single path draw in this Op right now.
    GrProcessorSet::Analysis analysis =
            fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
                                 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
    return RequiresDstTexture(analysis.requiresDstTexture());
}

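// Two GrCCDrawPathsOps can merge as long as they live in the same opList and their sRGB flags,
// processor sets, and view matrices (when local coordinates are in use) all match. Merging simply
// splices the draw lists together.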
bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
    GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
    SkASSERT(fOwningPerOpListPaths);
    SkASSERT(fNumDraws);
    SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
    SkASSERT(that->fNumDraws);

    if (fSRGBFlags != that->fSRGBFlags || fProcessors != that->fProcessors ||
        fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
        return false;
    }

    fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
    this->joinBounds(*that);

    SkDEBUGCODE(fNumDraws += that->fNumDraws);
    SkDEBUGCODE(that->fNumDraws = 0);
    return true;
}

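// Called when the op is recorded into an opList. Registers this op with the opList's CCPR paths
// object so the per-flush resources can be routed to it at flush time.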
void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
    SkASSERT(1 == fNumDraws);
    SkASSERT(!fOwningPerOpListPaths);
    owningPerOpListPaths->fDrawOps.addToTail(this);
    fOwningPerOpListPaths = owningPerOpListPaths;
}

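// Tallies this op's atlas space requirements into the per-flush specs. Each draw falls into one of
// three buckets: already resident in a cached atlas, copyable out of last flush's stashed atlas,
// or in need of a fresh render.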
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkASSERT(!draw.fCacheEntry);

        SkPath path;
        draw.fShape.asPath(&path);

        MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
        draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(draw.fCanStashPathMask));
        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().
            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (!draw.fCanStashPathMask) {
                // No point in keeping this cache entry around anymore if we aren't going to try to
                // stash the rendered path mask after flush.
                draw.fCacheEntry = nullptr;
                pathCache->evict(cacheEntry);
            }
        }

        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fLooseClippedIBounds.width(),
                                                   draw.fLooseClippedIBounds.height());
    }
}

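// Called by CCPR once the per-flush resources exist. Each draw either points at a cached atlas,
// gets its mask copied out of last flush's stashed atlas, or renders a new coverage-count mask;
// in every case a path instance is appended for the GPU.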
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon
        // that circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fLooseClippedIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);
            if (draw.fCacheEntry && draw.fCanStashPathMask &&
                resources->nextAtlasToStash() == atlas) {
                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                draw.fCacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds,
                                                     devBounds45, devIBounds,
                                                     draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                draw.fCacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}

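// Path instances are laid out contiguously per atlas. Each time the atlas changes, the previous
// range is closed off and a new range begins at instanceIdx.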
inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
    if (fInstanceRanges.empty()) {
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
    if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
        fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
        fInstanceRanges.push_back({atlasProxy, instanceIdx});
        return;
    }
}

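// Issues the actual GPU work. One GrCCPathProcessor draw is made per instance range, since each
// range references a different atlas texture.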
void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
    SkASSERT(fOwningPerOpListPaths);

    const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
    if (!resources) {
        return;  // Setup failed.
    }

    GrPipeline::InitArgs initArgs;
    initArgs.fFlags = fSRGBFlags;
    initArgs.fProxy = flushState->drawOpArgs().fProxy;
    initArgs.fCaps = &flushState->caps();
    initArgs.fResourceProvider = flushState->resourceProvider();
    initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
    GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());

    int baseInstance = fBaseInstance;
    SkASSERT(baseInstance >= 0);  // Make sure setupResources() has been called.

    for (const InstanceRange& range : fInstanceRanges) {
        SkASSERT(range.fEndInstanceIdx > baseInstance);

        GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
                                   fViewMatrixIfUsingLocalCoords);
        pathProc.drawPaths(flushState, pipeline, *resources, baseInstance, range.fEndInstanceIdx,
                           this->bounds());

        baseInstance = range.fEndInstanceIdx;
    }
}