/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrCCDrawPathsOp.h"

#include "GrMemoryPool.h"
#include "GrOpFlushState.h"
#include "ccpr/GrCCPerFlushResources.h"
#include "ccpr/GrCoverageCountingPathRenderer.h"
14
Chris Dalton1c548942018-05-22 13:09:48 -060015static bool has_coord_transforms(const GrPaint& paint) {
16 GrFragmentProcessor::Iter iter(paint);
17 while (const GrFragmentProcessor* fp = iter.next()) {
18 if (!fp->coordTransforms().empty()) {
19 return true;
20 }
21 }
22 return false;
23}
24
Robert Phillips4f6ba2e2018-06-15 15:59:38 +000025std::unique_ptr<GrCCDrawPathsOp> GrCCDrawPathsOp::Make(GrContext*, const SkIRect& clipIBounds,
Chris Dalton4da70192018-06-18 09:51:36 -060026 const SkMatrix& m, const GrShape& shape,
Robert Phillips4f6ba2e2018-06-15 15:59:38 +000027 const SkRect& devBounds, GrPaint&& paint) {
Chris Dalton4da70192018-06-18 09:51:36 -060028 bool canStashPathMask = true;
Chris Dalton42c21152018-06-13 15:28:19 -060029 SkIRect looseClippedIBounds;
30 devBounds.roundOut(&looseClippedIBounds); // GrCCPathParser might find slightly tighter bounds.
Chris Dalton4da70192018-06-18 09:51:36 -060031 if (!clipIBounds.contains(looseClippedIBounds)) {
32 canStashPathMask = false;
33 if (!looseClippedIBounds.intersect(clipIBounds)) {
34 return nullptr;
35 }
Chris Dalton42c21152018-06-13 15:28:19 -060036 }
Chris Dalton4da70192018-06-18 09:51:36 -060037 return std::unique_ptr<GrCCDrawPathsOp>(new GrCCDrawPathsOp(looseClippedIBounds, m, shape,
38 canStashPathMask, devBounds,
39 std::move(paint)));
Chris Dalton42c21152018-06-13 15:28:19 -060040}
41
GrCCDrawPathsOp::GrCCDrawPathsOp(const SkIRect& looseClippedIBounds, const SkMatrix& m,
                                 const GrShape& shape, bool canStashPathMask,
                                 const SkRect& devBounds, GrPaint&& paint)
        : GrDrawOp(ClassID())
        // Only retain the view matrix when the paint actually samples local coords;
        // otherwise store identity so unrelated draws can still combine (see
        // onCombineIfPossible, which compares this matrix).
        , fViewMatrixIfUsingLocalCoords(has_coord_transforms(paint) ? m : SkMatrix::I())
        , fSRGBFlags(GrPipeline::SRGBFlagsFromPaint(paint))
        // Seed the draw list with this op's single path draw.
        , fDraws({looseClippedIBounds, m, shape, paint.getColor(), nullptr, nullptr, {0, 0},
                  canStashPathMask, nullptr})
        , fProcessors(std::move(paint)) {  // Paint must be moved after fetching its color above.
    SkDEBUGCODE(fBaseInstance = -1);
    // FIXME: intersect with clip bounds to (hopefully) improve batching.
    // (This is nontrivial due to assumptions in generating the octagon cover geometry.)
    this->setBounds(devBounds, GrOp::HasAABloat::kYes, GrOp::IsZeroArea::kNo);
}
56
57GrCCDrawPathsOp::~GrCCDrawPathsOp() {
Chris Daltond7e22272018-05-23 10:17:17 -060058 if (fOwningPerOpListPaths) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060059 // Remove CCPR's dangling pointer to this Op before deleting it.
Chris Daltond7e22272018-05-23 10:17:17 -060060 fOwningPerOpListPaths->fDrawOps.remove(this);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060061 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -060062}
63
Chris Dalton4da70192018-06-18 09:51:36 -060064GrCCDrawPathsOp::SingleDraw::~SingleDraw() {
65 if (fCacheEntry) {
66 // All currFlushAtlas references must be reset back to null before the flush is finished.
67 fCacheEntry->setCurrFlushAtlas(nullptr);
68 }
69}
70
Chris Dalton5ba36ba2018-05-09 01:08:38 -060071GrDrawOp::RequiresDstTexture GrCCDrawPathsOp::finalize(const GrCaps& caps,
72 const GrAppliedClip* clip,
73 GrPixelConfigIsClamped dstIsClamped) {
Chris Dalton4da70192018-06-18 09:51:36 -060074 SkASSERT(1 == fNumDraws); // There should only be one single path draw in this Op right now.
Chris Dalton5ba36ba2018-05-09 01:08:38 -060075 GrProcessorSet::Analysis analysis =
Chris Dalton4bfb50b2018-05-21 09:10:53 -060076 fProcessors.finalize(fDraws.head().fColor, GrProcessorAnalysisCoverage::kSingleChannel,
77 clip, false, caps, dstIsClamped, &fDraws.head().fColor);
Chris Dalton4da70192018-06-18 09:51:36 -060078 return RequiresDstTexture(analysis.requiresDstTexture());
Chris Dalton5ba36ba2018-05-09 01:08:38 -060079}
80
Chris Dalton4da70192018-06-18 09:51:36 -060081bool GrCCDrawPathsOp::onCombineIfPossible(GrOp* op, const GrCaps&) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060082 GrCCDrawPathsOp* that = op->cast<GrCCDrawPathsOp>();
Chris Daltond7e22272018-05-23 10:17:17 -060083 SkASSERT(fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -060084 SkASSERT(fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -060085 SkASSERT(!that->fOwningPerOpListPaths || that->fOwningPerOpListPaths == fOwningPerOpListPaths);
Chris Dalton4bfb50b2018-05-21 09:10:53 -060086 SkASSERT(that->fNumDraws);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060087
Chris Daltondaef06a2018-05-23 17:11:09 -060088 if (fSRGBFlags != that->fSRGBFlags || fProcessors != that->fProcessors ||
Chris Dalton1c548942018-05-22 13:09:48 -060089 fViewMatrixIfUsingLocalCoords != that->fViewMatrixIfUsingLocalCoords) {
Chris Dalton5ba36ba2018-05-09 01:08:38 -060090 return false;
91 }
92
Chris Daltond7e22272018-05-23 10:17:17 -060093 fDraws.append(std::move(that->fDraws), &fOwningPerOpListPaths->fAllocator);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060094 this->joinBounds(*that);
95
Chris Dalton4bfb50b2018-05-21 09:10:53 -060096 SkDEBUGCODE(fNumDraws += that->fNumDraws);
97 SkDEBUGCODE(that->fNumDraws = 0);
Chris Dalton5ba36ba2018-05-09 01:08:38 -060098 return true;
99}
100
Chris Daltond7e22272018-05-23 10:17:17 -0600101void GrCCDrawPathsOp::wasRecorded(GrCCPerOpListPaths* owningPerOpListPaths) {
Chris Daltonf104fec2018-05-22 16:17:48 -0600102 SkASSERT(1 == fNumDraws);
Chris Daltond7e22272018-05-23 10:17:17 -0600103 SkASSERT(!fOwningPerOpListPaths);
104 owningPerOpListPaths->fDrawOps.addToTail(this);
105 fOwningPerOpListPaths = owningPerOpListPaths;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600106}
107
// Walks this op's draws and tells the per-flush resource specs how each path will be
// satisfied: reused from a cached atlas, copied out of last flush's stashed atlas, or
// rendered from scratch. Also populates each draw's fCacheEntry / fCachedAtlasProxy.
void GrCCDrawPathsOp::accountForOwnPaths(GrCCPathCache* pathCache,
                                         GrOnFlushResourceProvider* onFlushRP,
                                         const GrUniqueKey& stashedAtlasKey,
                                         GrCCPerFlushResourceSpecs* specs) {
    using CreateIfAbsent = GrCCPathCache::CreateIfAbsent;
    using MaskTransform = GrCCPathCache::MaskTransform;

    for (SingleDraw& draw : fDraws) {
        SkASSERT(!draw.fCacheEntry);

        SkPath path;
        draw.fShape.asPath(&path);

        // Look up (or, if we intend to stash this mask, create) a cache entry keyed on
        // the shape + mask transform. fCachedMaskShift receives the integer translation
        // component of the transform.
        MaskTransform m(draw.fMatrix, &draw.fCachedMaskShift);
        draw.fCacheEntry = pathCache->find(draw.fShape, m, CreateIfAbsent(draw.fCanStashPathMask));
        if (auto cacheEntry = draw.fCacheEntry.get()) {
            SkASSERT(!cacheEntry->currFlushAtlas());  // Shouldn't be set until setupResources().
            if (cacheEntry->atlasKey().isValid()) {
                // Does the path already exist in a cached atlas?
                if (cacheEntry->hasCachedAtlas() &&
                    (draw.fCachedAtlasProxy = onFlushRP->findOrCreateProxyByUniqueKey(
                                                      cacheEntry->atlasKey(),
                                                      GrCCAtlas::kTextureOrigin))) {
                    ++specs->fNumCachedPaths;
                    continue;
                }

                // Does the path exist in the atlas that we stashed away from last flush? If so we
                // can copy it into a new 8-bit atlas and keep it in the resource cache.
                if (stashedAtlasKey.isValid() && stashedAtlasKey == cacheEntry->atlasKey()) {
                    SkASSERT(!cacheEntry->hasCachedAtlas());
                    ++specs->fNumCopiedPaths;
                    specs->fCopyPathStats.statPath(path);
                    specs->fCopyAtlasSpecs.accountForSpace(cacheEntry->width(),
                                                           cacheEntry->height());
                    continue;
                }

                // Whatever atlas the path used to reside in, it no longer exists.
                cacheEntry->resetAtlasKeyAndInfo();
            }

            if (!draw.fCanStashPathMask) {
                // No point in keeping this cache entry around anymore if we aren't going to try and
                // stash the rendered path mask after flush.
                // NOTE(review): fCacheEntry is released before evict() — presumably the cache
                // itself holds a reference keeping the entry alive; verify in GrCCPathCache.
                draw.fCacheEntry = nullptr;
                pathCache->evict(cacheEntry);
            }
        }

        // Fall-through: this path must be rendered into a coverage-count atlas this flush.
        ++specs->fNumRenderedPaths;
        specs->fRenderedPathStats.statPath(path);
        specs->fRenderedAtlasSpecs.accountForSpace(draw.fLooseClippedIBounds.width(),
                                                   draw.fLooseClippedIBounds.height());
    }
}
164
// Emits one GrCCPathProcessor instance per draw into the per-flush resources, choosing
// the cheapest available source for each path's mask (cached atlas, current-flush atlas,
// copy from the stashed atlas, or a fresh render). Records which atlas proxy each run of
// instances uses via recordInstance().
void GrCCDrawPathsOp::setupResources(GrOnFlushResourceProvider* onFlushRP,
                                     GrCCPerFlushResources* resources, DoCopiesToCache doCopies) {
    using DoEvenOddFill = GrCCPathProcessor::DoEvenOddFill;
    SkASSERT(fNumDraws > 0);
    SkASSERT(-1 == fBaseInstance);  // Set once, here, per flush.
    fBaseInstance = resources->nextPathInstanceIdx();

    for (SingleDraw& draw : fDraws) {
        SkPath path;
        draw.fShape.asPath(&path);

        // Only winding and even-odd fills are supported (inverse fills are handled elsewhere).
        auto doEvenOddFill = DoEvenOddFill(SkPath::kEvenOdd_FillType == path.getFillType());
        SkASSERT(SkPath::kEvenOdd_FillType == path.getFillType() ||
                 SkPath::kWinding_FillType == path.getFillType());

        if (auto cacheEntry = draw.fCacheEntry.get()) {
            // Does the path already exist in a cached atlas texture?
            if (auto proxy = draw.fCachedAtlasProxy.get()) {
                SkASSERT(!cacheEntry->currFlushAtlas());
                this->recordInstance(proxy, resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                continue;
            }

            // Have we already encountered this path during the flush? (i.e. was the same SkPath
            // drawn more than once during the same flush, with a compatible matrix?)
            if (auto atlas = cacheEntry->currFlushAtlas()) {
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(
                        *cacheEntry, draw.fCachedMaskShift, draw.fColor,
                        cacheEntry->hasCachedAtlas() ? DoEvenOddFill::kNo : doEvenOddFill);
                continue;
            }

            // If the cache entry still has a valid atlas key at this point, it means the path
            // exists in the atlas that we stashed away from last flush. Copy it into a permanent
            // 8-bit atlas in the resource cache.
            if (DoCopiesToCache::kYes == doCopies && cacheEntry->atlasKey().isValid()) {
                SkIVector newOffset;
                GrCCAtlas* atlas =
                        resources->copyPathToCachedAtlas(*cacheEntry, doEvenOddFill, &newOffset);
                cacheEntry->updateToCachedAtlas(atlas->getOrAssignUniqueKey(onFlushRP),
                                                newOffset, atlas->refOrMakeCachedAtlasInfo());
                this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
                resources->appendDrawPathInstance().set(*cacheEntry, draw.fCachedMaskShift,
                                                        draw.fColor);
                // Remember this atlas in case we encounter the path again during the same flush.
                cacheEntry->setCurrFlushAtlas(atlas);
                continue;
            }
        }

        // Render the raw path into a coverage count atlas. renderPathInAtlas() gives us two tight
        // bounding boxes: One in device space, as well as a second one rotated an additional 45
        // degrees. The path vertex shader uses these two bounding boxes to generate an octagon that
        // circumscribes the path.
        SkASSERT(!draw.fCachedAtlasProxy);
        SkRect devBounds, devBounds45;
        SkIRect devIBounds;
        SkIVector devToAtlasOffset;
        if (auto atlas = resources->renderPathInAtlas(draw.fLooseClippedIBounds, draw.fMatrix, path,
                                                      &devBounds, &devBounds45, &devIBounds,
                                                      &devToAtlasOffset)) {
            this->recordInstance(atlas->textureProxy(), resources->nextPathInstanceIdx());
            resources->appendDrawPathInstance().set(devBounds, devBounds45, devToAtlasOffset,
                                                    draw.fColor, doEvenOddFill);
            // If this mask is stashable and it landed in the atlas slated for stashing,
            // point the cache entry at it so next flush can copy it out.
            if (draw.fCacheEntry && draw.fCanStashPathMask &&
                resources->nextAtlasToStash() == atlas) {
                const GrUniqueKey& atlasKey =
                        resources->nextAtlasToStash()->getOrAssignUniqueKey(onFlushRP);
                draw.fCacheEntry->initAsStashedAtlas(atlasKey, devToAtlasOffset, devBounds,
                                                     devBounds45, devIBounds,
                                                     draw.fCachedMaskShift);
                // Remember this atlas in case we encounter the path again during the same flush.
                draw.fCacheEntry->setCurrFlushAtlas(atlas);
            }
            continue;
        }
    }

    // Close out the final instance range now that all instances have been appended.
    if (!fInstanceRanges.empty()) {
        fInstanceRanges.back().fEndInstanceIdx = resources->nextPathInstanceIdx();
    }
}
250
251inline void GrCCDrawPathsOp::recordInstance(const GrTextureProxy* atlasProxy, int instanceIdx) {
252 if (fInstanceRanges.empty()) {
253 fInstanceRanges.push_back({atlasProxy, instanceIdx});
254 return;
255 }
256 if (fInstanceRanges.back().fAtlasProxy != atlasProxy) {
257 fInstanceRanges.back().fEndInstanceIdx = instanceIdx;
258 fInstanceRanges.push_back({atlasProxy, instanceIdx});
259 return;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600260 }
261}
262
263void GrCCDrawPathsOp::onExecute(GrOpFlushState* flushState) {
Chris Daltond7e22272018-05-23 10:17:17 -0600264 SkASSERT(fOwningPerOpListPaths);
Chris Daltonf104fec2018-05-22 16:17:48 -0600265
Chris Daltond7e22272018-05-23 10:17:17 -0600266 const GrCCPerFlushResources* resources = fOwningPerOpListPaths->fFlushResources.get();
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600267 if (!resources) {
268 return; // Setup failed.
269 }
270
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600271 GrPipeline::InitArgs initArgs;
272 initArgs.fFlags = fSRGBFlags;
273 initArgs.fProxy = flushState->drawOpArgs().fProxy;
274 initArgs.fCaps = &flushState->caps();
275 initArgs.fResourceProvider = flushState->resourceProvider();
276 initArgs.fDstProxy = flushState->drawOpArgs().fDstProxy;
277 GrPipeline pipeline(initArgs, std::move(fProcessors), flushState->detachAppliedClip());
278
279 int baseInstance = fBaseInstance;
Chris Dalton4da70192018-06-18 09:51:36 -0600280 SkASSERT(baseInstance >= 0); // Make sure setupResources() has been called.
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600281
Chris Dalton4c458b12018-06-16 17:22:59 -0600282 for (const InstanceRange& range : fInstanceRanges) {
283 SkASSERT(range.fEndInstanceIdx > baseInstance);
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600284
Chris Dalton4c458b12018-06-16 17:22:59 -0600285 GrCCPathProcessor pathProc(flushState->resourceProvider(), sk_ref_sp(range.fAtlasProxy),
Chris Dalton1c548942018-05-22 13:09:48 -0600286 fViewMatrixIfUsingLocalCoords);
Chris Dalton4da70192018-06-18 09:51:36 -0600287 pathProc.drawPaths(flushState, pipeline, *resources, baseInstance, range.fEndInstanceIdx,
288 this->bounds());
Chris Dalton4c458b12018-06-16 17:22:59 -0600289
290 baseInstance = range.fEndInstanceIdx;
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600291 }
Chris Dalton5ba36ba2018-05-09 01:08:38 -0600292}